Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
package As1

import scala.io._
// Fixed rename direction: scala.collection.mutable has no `MStack`; the class is
// `Stack`, and the code below refers to it as `MStack`, so the alias must be
// {Stack => MStack}, not {MStack => Stack}.
import scala.collection.mutable.{Stack => MStack}
object NFA {
  import FiniteAutomata._

  /** Maps a (state, input symbol) pair to the set of states reachable on that symbol. */
  type TransitionFunction = (State, Symbol) => Set[State]

  /** "Five-tuple" definition for an NFA. */
  case class Definition(states: Set[State],
                        inputSymbols: Set[Symbol],
                        startState: State,
                        transitions: Set[(State, Symbol, Set[State])],
                        acceptingStates: Set[State])

  /**
   * Parses an NFA definition from a text file. Blank lines are ignored.
   *
   * Layout of the non-empty lines:
   *   line 1 - input symbols
   *   line 2 - states in the NFA
   *   line 3 - start state (if several are listed, the first is used)
   *   line 4 - accepting states
   *   remaining lines - transitions: source state, symbol, destination states
   *
   * Assumes input symbols are single characters; the literal token `epsilon`
   * is normalized to the 'ε symbol used by [[epsilonClosure]].
   *
   * The underlying Source is always closed, even if parsing fails.
   */
  def fromFile(filename: String): Definition = {
    // One transition line: source state, symbol, then one or more destinations.
    def toTransition(tokens: Seq[Symbol]): (State, Symbol, Set[State]) =
      (State(tokens(0)),
       if (tokens(1) == 'epsilon) 'ε else tokens(1),
       tokens.drop(2).map(State).toSet)

    val source = Source.fromFile(filename)
    try {
      val lines = source.getLines.filter(_.nonEmpty).toList
      require(lines.length >= 4,
        s"NFA file '$filename' must contain at least 4 non-empty lines")
      Definition(
        states          = tokenize(lines(1)).map(State).toSet,
        inputSymbols    = tokenize(lines(0)).toSet,
        startState      = State(tokenize(lines(2))(0)),
        transitions     = lines.drop(4).map(l => toTransition(tokenize(l))).toSet,
        acceptingStates = tokenize(lines(3)).map(State).toSet)
    } finally {
      source.close() // original leaked the file handle
    }
  }

  /**
   * Computes the epsilon-closure of `ss`: all states reachable from any state
   * in `ss` via zero or more 'ε transitions (always includes `ss` itself).
   *
   * Iterative worklist search over immutable data. This fixes the original
   * implementation, which tested membership against a fresh recursive call to
   * `epsilonClosure(ss, trans)` instead of the accumulated result — both
   * re-computing the closure on every discovered state and re-pushing states
   * that were already found.
   */
  def epsilonClosure(ss: Set[State], trans: TransitionFunction): Set[State] = {
    @annotation.tailrec
    def loop(pending: List[State], seen: Set[State]): Set[State] = pending match {
      case Nil => seen
      case s :: rest =>
        // Only states not yet in the closure are enqueued, so termination is
        // guaranteed for any finite state set.
        val discovered = trans(s, 'ε).filterNot(seen)
        loop(discovered.toList ::: rest, seen ++ discovered)
    }
    loop(ss.toList, ss)
  }

  /** Splits a space-separated line into Symbol tokens, stripping commas first. */
  private def tokenize(str: String): Seq[Symbol] =
    str.replace(",", "").split(" ").map(Symbol(_))
}
Add Comment
Please, Sign In to add comment