import argparse
from collections import deque
from antlr4 import *
from milestone_2Lexer import milestone_2Lexer
from milestone_2Listener import milestone_2Listener
from antlr4.error.ErrorListener import *
from antlr4.error.Errors import RecognitionException
from milestone_2Parser import milestone_2Parser
from antlr4.tree.Trees import Trees
from antlr4.Token import CommonToken

class NimSyntaxError(Exception):
    def __init__(self, symbol, line, column, msg):
        self.symbol = symbol
        self.line = line
        self.column = column
        self.msg = "At line %d, column %d: %s" % (line, column, msg)

class NimErrorListener(ErrorListener):
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        # Wrap the ANTLR error in a NimSyntaxError and let it propagate up to
        # check_validity, which reports the file as invalid.
        raise NimSyntaxError(offendingSymbol, line, column, msg)

class NimListener(milestone_2Listener):
    # The listener hooks are intentionally empty; walking the tree is only
    # used to confirm that the input parses. The commented prints are
    # debugging leftovers.
    def enterCondStmt(self, ctx):
        # print("HELLO?")
        # print(ctx.getText())
        pass

    def enterIfStmt(self, ctx):
        # print("IF: %s" % ctx.getText())
        pass

    def enterEveryRule(self, ctx):
        # print("ENTERED RULE MAN")
        pass

class NimLexer(milestone_2Lexer):
    # Wraps the generated lexer and injects NEWLINE/INDENT/DEDENT tokens so
    # the parser can handle Nim's indentation-based block structure.
    def __init__(self, input, output=None):
        super().__init__(input, output)
        self.token_queue = deque()   # tokens waiting to be handed to the parser
        self.indent_stack = [0]      # currently open indentation levels
        self.last_token = None

    def insertIndent(self, indent_level):
        # Open a new indentation level and queue an INDENT token for it.
        indent_token = self.createToken(milestone_2Parser.INDENT, " " * indent_level)
        self.indent_stack.append(indent_level)
        self.token_queue.append(indent_token)

    def insertDedent(self):
        # Close the innermost indentation level with a DEDENT token.
        dedent_token = self.createToken(milestone_2Parser.DEDENT, "")
        self.token_queue.append(dedent_token)
        self.indent_stack.pop()

    def insertNewline(self):
        newline_token = self.createToken(milestone_2Parser.NEWLINE, "\r\n")
        self.token_queue.append(newline_token)

    def nextToken(self):
        # Refill the queue from the underlying lexer until at least one token
        # is available, then hand tokens out in FIFO order.
        while not self.token_queue:
            token = super().nextToken()
            if token.type in [self.NEWLINE, self.COMMENT, self.DOCUMENTATION_COMMENTS]:
                print("Token: %s " % get_token_type(token))
                if token.type != self.NEWLINE:
                    # Comments also terminate a logical line, so emit a NEWLINE first.
                    self.insertNewline()
                # The token text carries the leading spaces of the next line;
                # compare them against the current indentation level.
                indent_level = token.text.count(" ")
                if indent_level > self.indent_stack[-1]:
                    self.insertIndent(indent_level)
                elif indent_level < self.indent_stack[-1]:
                    while self.indent_stack and self.indent_stack[-1] > indent_level:
                        self.insertDedent()
                else:
                    self.token_queue.append(token)
            elif token.type == token.EOF:
                # Encountered EOF, need to dedent all open indents.
                while self.indent_stack and self.indent_stack[-1] > 0:
                    self.insertDedent()
                self.token_queue.append(token)
            else:
                self.token_queue.append(token)
        return self.token_queue.popleft()

    def createToken(self, type, text):
        # Build a synthetic token of the given type at the lexer's current
        # position; start/stop are chosen so the token spans `text`.
        stop = self.getCharIndex() - 1
        start = stop if text == "" else stop - len(text) + 1
        return CommonToken(
            self._tokenFactorySourcePair,
            type,
            self.DEFAULT_TOKEN_CHANNEL,
            start, stop
        )

def get_token_type(token):
    if token.type == milestone_2Parser.EOF:
        return "EOF"
    return milestone_2Parser.symbolicNames[token.type]

def print_tokens(input_stream):
    input_stream.seek(0)
    lexer = NimLexer(input_stream)
    # Pull tokens straight from the lexer and print their symbolic names.
    token = lexer.nextToken()
    while token.type != token.EOF:
        print(get_token_type(token))
        token = lexer.nextToken()

def check_validity(input_stream):
    input_stream.seek(0)
    lexer = NimLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = milestone_2Parser(token_stream)
    # Replace the default console listener with one that raises on the
    # first syntax error.
    parser.removeErrorListeners()
    parser.addErrorListener(NimErrorListener())
    walker = ParseTreeWalker()
    output = "valid"
    try:
        tree = parser.start()
        walker.walk(NimListener(), tree)
    except NimSyntaxError as e:
        print("Nim Syntax Error: %s" % e.msg)
        output = "invalid"
    print(output)

def main():
    with open(args.file, "r") as file:
        lines = file.read()
    input_stream = InputStream(lines)
    print_tokens(input_stream)
    check_validity(input_stream)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(add_help=True, description='Sample Commandline')

    parser.add_argument('--file', action="store", help="path of file to take as input", nargs="?", metavar="file")

    args = parser.parse_args()

    main()
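
Usage sketch: the script name milestone_2_driver.py and the input file test.nim below are placeholders, and the ANTLR-generated milestone_2Lexer / milestone_2Parser / milestone_2Listener modules are assumed to be importable from the same directory.

    python milestone_2_driver.py --file test.nim

This prints the symbolic name of each token produced by NimLexer (including the injected NEWLINE/INDENT/DEDENT tokens) and then prints "valid" or "invalid" depending on whether parsing raises a NimSyntaxError.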