How the token_value variable is used in a parser (mparser.py)

Python code snippet from mparser.py (Pulzar project)

mparser.py

Source:mparser.py Github

copy

Full Screen

1"""2©Pulzar 2018-203#Author : Brian Turza4version: 0.45#Created : 14/9/2019 (this version)6"""7from Lib.math.main import *8import numpy as np9import re10class Parser:11 def __init__(self, token_stream, include):12 self.tokens = token_stream13 self.include = include14 self.ast = {'main_scope': []}15 self.symbol_table = []16 self.isConsole = True17 self.lines = 118 self.token_index = 019 self.nesting_count = 020 self.error = False21 def parse(self, token_stream):22 """23 This function takes tokens from lexer and procces them #TODO24 """25 count = 026 while self.token_index < len(token_stream):27 token_type = self.tokens[self.token_index][0]28 token_value = self.tokens[self.token_index][1]29 # If token == echo add tokens to parse_include()30 if self.error:31 return [self.ast, self.isConsole, self.error]32 if token_type == "KEYWORD" and token_value == "include":33 self.parse_include(token_stream[self.token_index:len(token_stream)], False)34 elif token_type == "KEYWORD" and token_value == "Program":35 self.parse_program(token_stream[self.token_index:len(token_stream)], False)36 count += 137 elif token_type == "DATATYPE":38 self.parse_decl_variable(token_stream[self.token_index:len(token_stream)], False)39 # Check if it was already dececlared40 elif token_type == "BUILT_IN_FUNCTION":41 self.parse_builtin(token_stream[self.token_index:len(token_stream)], False)42 elif token_type == "MATH_FUNCTION":43 self.parse_math(token_stream[self.token_index:len(token_stream)], False)44 elif token_type == "KEYWORD" and token_value == "if" or token_value == "else" or token_value == "elseif":45 self.parse_conditional_statements(token_stream[self.token_index:len(token_stream)], False)46 elif token_type == "KEYWORD" and token_value == "for":47 self.parse_loop(token_stream[self.token_index:len(token_stream)], False)48 elif token_type == "KEYWORD" and token_value == "while":49 self.parse_loop(token_stream[self.token_index:len(token_stream)], False)50 elif token_type == "KEYWORD" and 
token_value == "func":51 self.parse_func(token_stream[self.token_index:len(token_stream)], False)52 elif token_type == "KEYWORD" and token_value == "class":53 self.parse_class(token_stream[self.token_index:len(token_stream)], False)54 elif token_type == "COMMENT" and token_value == r"\\":55 self.parse_single_line_comment(token_stream[self.token_index:len(token_stream)], False)56 elif token_type == "COMMENT" and token_value == "|**":57 self.parse_multi_line_comment(token_stream[self.token_index:len(token_stream)], False)58 elif token_type == "KEYWORD" and token_value == "macros":59 self.parse_macros(token_stream[self.token_index:len(token_stream)])60 elif token_type == "KEYWORD" and token_value == "match":61 self.parse_match(token_stream[self.token_index:len(token_stream)], False)62 elif token_type == "NEWLINE": self.lines += 163 try: # If last token pass to this, it would throw error64 if token_type == "IDENTIFIER" and token_stream[self.token_index + 1][0] == "COLON":65 self.call_func(token_stream[self.token_index:len(token_stream)], False)66 except:67 pass68 try:69 if token_type == "IDENTIFIER" and self.tokens[self.token_index + 1][1] == "=" or token_type == "IDENTIFIER" and self.tokens[self.token_index + 1][0] == "INCREMENT_OPERATOR":70 self.parse_variable(token_stream[self.token_index:len(token_stream)], False)71 except IndexError: pass72 if token_type == "UNDEFINIED":73 # TODO Identify better errors74 self.error_message("SyntaxError: \n Undefinied")75 self.token_index += 176 # If no Program declaration is found in code, calls a error message77 if count == 0:78 msg = "SyntaxError at line {}:\nProgram must be definied".format(self.lines)79 self.error_message(msg, token_stream, self.token_index)80 return [self.ast, self.isConsole, self.error]81 def parse_include(self, token_stream, inScope):82 tokens_checked = 083 lib = ""84 ast = {'Include': []}85 for token in range(0, len(token_stream)):86 token_type = token_stream[tokens_checked][0]87 token_value = 
token_stream[tokens_checked][1]88 if token_type in ["SEMIC"]: break89 if token == 1 and token_type != "STRING":90 lib = "Lib.{}.main".format(token_value)91 try:92 import importlib93 importlib.import_module(lib)94 except ImportError:95 msg = "IncludeError at line {}:\n'{}' isnt recognized as libary or pulzar file".format(self.lines, token_value)96 self.error_message(msg, token_stream, token)97 elif token == 1 and token_type == "STRING":98 lib = token_value99 tokens_checked += 1100 ast['Include'].append({'libary': lib})101 if inScope == False:102 self.ast['main_scope'].append(ast)103 self.token_index += tokens_checked104 return [ast, tokens_checked]105 def parse_math(self, token_stream, inScope):106 value = ""107 tokens_checked = 0108 ast = {'math': []}109 for token in range(0, len(token_stream)):110 token_type = token_stream[tokens_checked][0]111 token_value = token_stream[tokens_checked][1]112 if token_type == "SEMIC": break113 if token == 0: ast['math'].append({'function': token_value})114 if token == 1 and token_type in ["INT", "ID"]:115 value = token_value116 elif token == 1 and token_type not in ["INTEGER", "IDENTIFIER"]:117 msg = "Error: '" + token_value + "' must be int"118 self.error_message(msg, token_stream, token)119 elif token > 1 and token % 2 == 0:120 value += token_value121 tokens_checked += 1122 ast['math'].append({'arguments': value})123 if inScope == False:124 self.ast['main_scope'].append(ast)125 self.token_index += tokens_checked126 return [ast, tokens_checked]127 def parse_program(self, token_stream, inScope):128 tokens_checked = 0129 ast = {'program': []}130 for token in range(0, len(token_stream)):131 token_type = token_stream[tokens_checked][0]132 token_value = token_stream[tokens_checked][1]133 if token_type == "SEMIC": break134 elif token == 1 and token_value in ["Program", "Console", "Browser"]:135 ast['program'].append({'type': token_value})136 if token_value == "Browser":137 self.isConsole = False138 elif token == 1 and token_value not 
in ["Program", "Console", "Browser"]:139 self.error_message("Program error: undefinied program '{}'".format(token_value))140 tokens_checked += 1141 if inScope == False:142 self.ast['main_scope'].append(ast)143 self.token_index += tokens_checked144 return [ast, tokens_checked]145 def parse_decl_variable(self, token_stream, inScope):146 tokens_checked = 0147 ast = {'variable_declaration': []}148 value = ""149 typ8 = ""150 c = False151 var_decl = False152 square_root = False153 dots = False154 for token in range(0, len(token_stream)):155 token_type = token_stream[tokens_checked][0]156 token_value = token_stream[tokens_checked][1]157 # If semic is found loop breaks158 if token_type in ["SEMIC", "NEWLINE"]:159 break160 elif token == 0 and token_stream[2][0] == "SEMIC":161 ast['variable_declaration'].append({'type': token_value})162 typ8 = token_value163 ast['variable_declaration'].append({'name': token_stream[1][1]})164 if token == 0 and token_value in ["var", "int", "float"]:165 ast['variable_declaration'].append({'value': '0'})166 elif token == 0 and token_value == "complex":167 ast['variable_declaration'].append({'value': 'Complex()'})168 elif token == 0 and token_value == "bool":169 ast['variable_declaration'].append({'value': 'None'})170 elif token == 0 and token_value == "str":171 ast['variable_declaration'].append({'value': '""'})172 elif token == 0 and token_value == "char":173 ast['variable_declaration'].append({'value': "''"})174 var_decl = True175 break176 elif token == 0 and token_stream[2][0] != "SEMIC":177 ast['variable_declaration'].append({'type': token_value})178 typ8 = token_value179 elif token == 1 and token_type == "IDENTIFIER":180 ast['variable_declaration'].append({'name': token_value})181 elif token == 1 and token_type != "IDENTIFIER":182 msg = "SyntaxError at line"+ str(self.lines) +":\nInvalid variable name '" + token_value + "'"183 self.error_message(msg, token_stream, token)184 elif token == 2 and token_type not in ["OPERATOR", 
"INCREMENT_OPERATOR"]:185 msg = "SyntaxError at line {}\n:Invalid operator '{}'".format(self.lines, token_value)186 self.error_message(msg, token_stream, token)187 elif token == 3 and token_type == "IDENTIFIER" and token_value not in constants and token_stream[tokens_checked + 1][1] != ":":188 value = str(token_value)189 elif token == 3 and token_type == "IDENTIFIER" and token_value in constants:190 value = "constants['{}']".format(token_value)191 elif token == 3 and token_type == "STRING":192 value = token_value.replace('\s', ' ')193 elif token == 3 and token_type == "COMPLEX_NUMBER":194 value = str(token_value) + "j"195 c = True196 elif token == 3 and token_type == "SQUARE_ROOT":197 if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):198 token_value = self.get_token_value(token_value)199 if token_value[len(token_value) - 1] in ["i", "j"]:200 value = str(np.sqrt(complex(token_value)))201 else:202 value = str(np.sqrt(float(token_value)))203 elif token == 3 and token_type not in ["COMPLEX_NUMBER", "STRING", "FACTORIAL"]:204 value = str(token_value)205 elif token > 3 and token_type not in ["COMPLEX_NUMBER", "FACTORIAL", "OPERATOR", "SQUARE_ROOT", "IDENTIFIER", "ELLIPSIS_OPERATOR"]:206 value += str(token_value)207 elif token > 3 and token_type == "OPERATOR":208 value += str(token_value.replace('^', '**'))209 elif token > 3 and token_type == "ELLIPSIS_OPERATOR":210 value += str(token_value)211 dots = True212 elif token == 3 and token_type == "FACTORIAL":213 math = MathModule()214 value = str(math.factorial(int(token_value)))215 elif token > 3 and token_type == "COMPLEX_NUMBER":216 value += str(token_value) + "j"217 c = True218 elif token > 3 and token_type == "FACTORIAL":219 math = MathModule()220 value += str(math.factorial(int(token_value)))221 elif token > 3 and token_type == "IDENTIFIER" and token_value in constants:222 value += "constants['{}']".format(token_value)223 elif token > 3 and token_type == "IDENTIFIER":224 value += str(token_value)225 
elif token > 3 and token_type == "SQUARE_ROOT":226 if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):227 token_value = self.get_token_value(token_value)228 if token_value[len(token_value) - 1] in ["i", "j"]:229 value += str(np.sqrt(complex(token_value)))230 else:231 value += str(np.sqrt(float(token_value)))232 elif token >= 3 and token_type in ["DATATYPE", "KEYWORD"]:233 msg = "SyntaxError at line "+ str(self.lines) +":\nInvalid variable value '" + token_value + "'"234 self.error_message(msg, token_stream, token)235 tokens_checked += 1236 if dots:237 value = str(self.get_tokens_range(value))238 #----------------------------------------------------------239 #TYPE CHECKING & EVALUATION:240 def type_check(value):241 string = True242 if "[" in value and "]" in value:243 return244 if re.match("[0-9]", value) or value in ["True", "False", "None"] or "constants" in value:245 string = False246 if typ8 == "str" and string:247 value = str(value)248 elif typ8 == "str" and string == False:249 msg = "TypeError at line %s:\nDeclared wrong data type, %s is not string" % (self.lines, value)250 self.error_message(msg, token_stream, token)251 if typ8 == "char" and string and len(value) == 1:252 value = str(value)253 elif typ8 == "char" and string == False or typ8 == "char" and len(value) > 3:254 msg = "TypeError at line %s:\nDeclared wrong data type, %s is not char" % (self.lines, value)255 self.error_message(msg, token_stream, token)256 if typ8 == "int" and string == False and value not in ["True", "False", "None"]:257 try:258 value = eval(value)259 value = int(value)260 except NameError:261 pass262 elif typ8 == "int" and string == True or typ8 == "int" and value in ["True", "False", "None"]:263 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not integer" % (self.lines, value)264 self.error_message(msg, token_stream, token)265 if typ8 == "float" and string == False and value not in ["True", "False", "None"]:266 try:267 value = eval(value)268 value 
= float(value)269 except NameError:270 pass271 elif typ8 == "float" and string == True or typ8 == "float" and value in ["True", "False", "None"]:272 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not float" % (self.lines, value)273 self.error_message(msg, token_stream, token)274 if typ8 == "complex" and string == False and value not in ["True", "False", "None"]:275 try:276 value = eval(value)277 value = 'Complex({}, {})'.format(value.real, value.imag)278 except NameError:279 pass280 elif typ8 == "complex" and string == True or typ8 == "complex" and value in ["True", "False", "None"]:281 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not complex number" % (282 self.lines, value)283 self.error_message(msg, token_stream, token)284 if typ8 == "bool" and value in ["True", "False", "None"]:285 try:286 value = bool(value)287 except NameError:288 pass289 elif typ8 == "bool" and value not in ["True", "False", "None"]:290 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not boolean" % (self.lines, value)291 self.error_message(msg, token_stream, token)292 if var_decl == False:293 string = True294 type_check(value)295 #---------------------------------------------------------296 if var_decl == False:297 ast['variable_declaration'].append({'value': value})298 if inScope == False:299 self.ast['main_scope'].append(ast)300 self.symbol_table.append([ast['variable_declaration'][0]['type'], ast['variable_declaration'][1]['name'], ast['variable_declaration'][2]['value']]) # type, name, value301 self.token_index += tokens_checked302 return [ast, tokens_checked]303 def parse_variable(self, token_stream, inScope):304 tokens_checked = 0305 ast = {'variable_declaration': []}306 value = ""307 typ8 = ""308 c = False309 var_decl = False310 square_root = False311 dots = False312 for token in range(0, len(token_stream)):313 token_type = token_stream[tokens_checked][0]314 token_value = token_stream[tokens_checked][1]315 # If semic is found loop 
breaks316 if token_type in ["SEMIC", "NEWLINE"]:317 break318 elif token == 0 and token_type == "IDENTIFIER":319 typ8 = self.get_token_type(token_value)320 ast['variable_declaration'].append({'type': typ8})321 ast['variable_declaration'].append({'name': token_value})322 elif token == 0 and token_type != "IDENTIFIER":323 msg = ("SyntaxError at line "+ str(self.lines) +"\nInvalid variable name '" + token_value + "'")324 self.error_message(msg, token_stream, token)325 elif token == 1 and token_type not in ["OPERATOR", "INCREMENT_OPERATOR"]:326 msg = "SyntaxError at line {}:\nInvalid operator '{}'".format(self.lines, token_value)327 self.error_message(msg, token_stream, token)328 elif token == 2 and token_type == "IDENTIFIER" and token_value not in constants and token_stream[tokens_checked + 1][1] != ":":329 value = str(token_value)330 elif token == 2 and token_type == "IDENTIFIER" and token_value in constants:331 value = "constants['{}']".format(token_value)332 elif token == 2 and token_type == "STRING":333 value = token_value.replace('\s', ' ')334 elif token == 2 and token_type == "COMPLEX_NUMBER":335 value = str(token_value) + "j"336 c = True337 elif token == 2 and token_type == "SQUARE_ROOT":338 if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):339 token_value = self.get_token_value(token_value)340 if token_value[len(token_value) - 1] in ["i", "j"]:341 value = str(np.sqrt(complex(token_value)))342 else:343 value = str(np.sqrt(float(token_value)))344 elif token == 2 and token_type not in ["COMPLEX_NUMBER", "STRING", "FACTORIAL"]:345 value = str(token_value)346 elif token > 2 and token_type not in ["COMPLEX_NUMBER", "FACTORIAL", "OPERATOR", "SQUARE_ROOT", "ELLIPSIS_OPERATOR"]:347 value += str(token_value)348 elif token > 2 and token_type == "OPERATOR":349 value += str(token_value.replace('^', '**'))350 elif token > 2 and token_type == "ELLIPSIS_OPERATOR":351 value += str(token_value)352 dots = True353 elif token == 2 and token_type == "FACTORIAL":354 
math = MathModule()355 value = str(math.factorial(int(token_value)))356 elif token > 2 and token_type == "COMPLEX_NUMBER":357 value += str(token_value) + "j"358 c = True359 elif token > 2 and token_type == "FACTORIAL":360 math = MathModule()361 value += str(math.factorial(int(token_value)))362 elif token > 2 and token_type == "IDENTIFIER" and token_value in constants:363 value += "constants['{}']".format(token_value)364 elif token > 2 and token_type == "SQUARE_ROOT":365 if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):366 token_value = self.get_token_value(token_value)367 if token_value[len(token_value) - 1] in ["i", "j"]:368 value += str(np.sqrt(complex(token_value)))369 else:370 value += str(np.sqrt(float(token_value)))371 tokens_checked += 1372 if dots:373 value = str(self.get_tokens_range(value))374 #TYPE CHECKING & EVALUATION:375 #----------------------------------------------------------376 string = True377 def type_check(value):378 if re.match("[0-9]", value) or value in ["True", "False", "None"]:379 string = False380 if typ8 == "str" and string:381 value = str(value)382 elif typ8 == "str" and string == False:383 msg = "TypeError at line %s:\nDeclared wrong data type, %s is not string" % (self.lines, value)384 self.error_message(msg, token_stream, token)385 if typ8 == "char" and string and len(value) == 1:386 value = str(value)387 elif typ8 == "char" and string == False or typ8 == "char" and len(value) > 3:388 msg = "TypeError at line %s:\nDeclared wrong data type, %s is not char" % (self.lines, value)389 self.error_message(msg, token_stream, token)390 if typ8 == "int" and string == False and value not in ["True", "False", "None"]:391 try:392 value = eval(value)393 value = int(value)394 except NameError:395 pass396 elif typ8 == "int" and string == True or typ8 == "int" and value in ["True", "False", "None"]:397 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not integer" % (self.lines, value)398 self.error_message(msg, 
token_stream, token)399 if typ8 == "float" and string == False and value not in ["True", "False", "None"]:400 try:401 value = eval(value)402 value = float(value)403 except NameError:404 pass405 elif typ8 == "float" and string == True or typ8 == "float" and value in ["True", "False", "None"]:406 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not float" % (self.lines, value)407 self.error_message(msg, token_stream, token)408 if typ8 == "bool" and value in ["True", "False", "None"]:409 try:410 value = bool(value)411 except NameError:412 pass413 elif typ8 == "bool" and value not in ["True", "False", "None"]:414 msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not boolean" % (self.lines, value)415 self.error_message(msg, token_stream, token)416 #---------------------------------------------------------417 if var_decl == False:418 ast['variable_declaration'].append({'value': value})419 if inScope == False:420 self.ast['main_scope'].append(ast)421 for i in self.symbol_table:422 if i[1] == ast['variable_declaration'][1]['name']:423 #Change delcared varaible value to this one424 i[2] = ast['variable_declaration'][2]['value']425 self.token_index += tokens_checked426 return [ast, tokens_checked]427 def get_scope(self, token_stream):428 nesting_count = 1429 tokens_checked = 0430 scope_tokens = []431 for token in token_stream:432 tokens_checked += 1433 token_value = token[1]434 token_type = token[0]435 if token_type == "SCOPE_DEFINIER" and token_value == "{":436 nesting_count += 1437 elif token_type == "SCOPE_DEFINIER" and token_value == "}":438 nesting_count -= 1439 if nesting_count == 0:440 scope_tokens.append(token)441 break442 else:443 scope_tokens.append(token)444 return [scope_tokens, tokens_checked]445 def parse_scope(self, token_stream, statement_ast, astName, isNested, macros, match_case=False):446 ast = {'scope': []}447 tokens_checked = 0448 lines = 1449 while tokens_checked < len(token_stream):450 token_type = 
token_stream[tokens_checked][0]451 token_value = token_stream[tokens_checked][1]452 if match_case:453 case = self.parse_case(token_stream[tokens_checked + 1:len(token_stream)])454 ast['scope'].append(case[0])455 tokens_checked += case[1]456 # If token is echo add tokens to parse_include()457 if token_type == "KEYWORD" and token_value == "include":458 include = self.parse_include(token_stream[tokens_checked:len(token_stream)])459 ast['scope'].append(include[0])460 tokens_checked += include[1]461 elif token_type == "DATATYPE":462 var = self.parse_decl_variable(token_stream[tokens_checked:len(token_stream)], True)463 ast['scope'].append(var[0])464 tokens_checked += var[1]465 elif token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][1] == "=" or token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][0] == "INCREMENT_OPERATOR":466 varx = self.parse_variable(token_stream[tokens_checked:len(token_stream)], True)467 ast['scope'].append(varx[0])468 tokens_checked += varx[1]469 elif token_type == "BUILT_IN_FUNCTION":470 builtin = self.parse_builtin(token_stream[tokens_checked:len(token_stream)], True)471 ast['scope'].append(builtin[0])472 tokens_checked += builtin[1]473 elif token_type == "MATH_FUNCTION":474 math = self.parse_math(token_stream[tokens_checked:len(token_stream)], True)475 ast['scope'].append(math[0])476 tokens_checked += math[1]477 elif token_type == "KEYWORD" and token_value == "if" or token_value == "else" or token_value == "elseif":478 condtitional = self.parse_conditional_statements(token_stream[tokens_checked:len(token_stream)], True)479 ast['scope'].append(condtitional[0])480 tokens_checked += condtitional[1] - 1481 elif token_type == "KEYWORD" and token_value == "for":482 loop = self.parse_loop(token_stream[tokens_checked:len(token_stream)], True)483 ast['scope'].append(loop[0])484 tokens_checked += loop[1]485 elif token_type == "KEYWORD" and token_value == "while":486 loop = 
self.parse_loop(token_stream[tokens_checked:len(token_stream)], True)487 ast['scope'].append(loop[0])488 tokens_checked += loop[1]489 elif token_type == "KEYWORD" and token_value == "func":490 function = self.parse_func(token_stream[tokens_checked:len(token_stream)], True)491 ast['scope'].append(function[0])492 tokens_checked += function[1]493 elif token_type == "KEYWORD" and token_value == "return":494 return_statement = self.parse_return(token_stream[tokens_checked:len(token_stream)], True)495 ast['scope'].append(return_statement[0])496 tokens_checked += return_statement[1]497 elif token_type == "COMMENT" and token_value == r"\\":498 comment = self.parse_single_line_comment(token_stream[tokens_checked:len(token_stream)], True)499 ast['scope'].append(comment[0])500 tokens_checked += comment[1]501 elif token_type == "COMMENT" and token_value == "|**":502 comment = self.parse_multi_line_comment(token_stream[tokens_checked:len(token_stream)], True)503 ast['scope'].append(comment[0])504 tokens_checked += comment[1]505 elif macros == True and token_value == "define":506 define = self.parse_macros_define(token_stream[tokens_checked:len(token_stream)], True)507 ast['scope'].append(define[0])508 tokens_checked += define[1]509 try: # If last token pass to this, it would throw error510 if token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][0] == "COLON":511 run = self.call_func(token_stream[tokens_checked:len(token_stream)], True)512 ast['scope'].append(run[0])513 tokens_checked += run[1]514 except:515 pass516 if token_type == "NEWLINE":517 self.lines += 1518 if token_value == "}":519 self.nesting_count += 1520 tokens_checked += 1521 self.token_index += self.nesting_count + 1522 self.lines -= 1523 statement_ast[astName].append(ast)524 if isNested == False:525 self.ast['main_scope'].append(statement_ast)526 def parse_builtin(self, token_stream, inScope):527 tokens_checked = 0528 value = ""529 ast = {'builtin_function': []}530 execute = False531 dots = False532 
for token in range(0, len(token_stream)):533 token_type = token_stream[tokens_checked][0]534 token_value = token_stream[tokens_checked][1]535 if token_type == "SEMIC": break536 if token == 0 and token_type == "BUILT_IN_FUNCTION":537 ast['builtin_function'].append({'function': token_value})538 elif token == 1 and token_type == "IDENTIFIER" and token_value not in constants:539 if token_stream[0][1] == "execute":540 value = self.get_token_value(token_value)541 elif token_stream[0][1] == "input":542 ast['builtin_function'].append({'type' : self.get_token_type(token_value)})543 value = str(token_value)544 else:545 value = str(token_value)546 elif token == 1 and token_type == "IDENTIFIER" and token_value in constants:547 value = "constants['{}']".format(token_value)548 elif token == 1 and token_type not in ["IDENTIFIER", "FACTORIAL", "SQUARE_ROOT"]:549 value = token_value550 elif token == 1 and token_type == "FACTORIAL":551 math = MathModule()552 value = str(math.factorial(int(token_value)))553 elif token == 1 and token_type == "SQUARE_ROOT":554 if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):555 token_value = str(self.get_token_value(token_value))556 if "Complex(" in token_value and ")" in token_value:557 value = str(np.sqrt(token_value))558 else:559 value = str(np.sqrt(float(token_value)))560 elif token > 1 and token_type == "ELLIPSIS_OPERATOR":561 value += str(token_value)562 dots = True563 elif token > 1 and token_type == "FACTORIAL":564 math = MathModule()565 value += str(math.factorial(int(token_value)))566 elif token > 1 and token_type not in ["FACTORIAL", "OPERATOR", "IDENTIFIER"]:567 value += str(token_value)568 elif token > 1 and token_type == "OPERATOR":569 value += str(token_value.replace('^', '**'))570 elif token > 1 and token_type == "IDENTIFIER" and token_value not in constants:571 if token_stream[0][1] == "execute":572 value += self.get_token_value(token_value)573 else:574 value += str(token_value)575 elif token > 1 and token_type == 
"IDENTIFIER" and token_value in constants:576 value += "constants['{}']".format(token_value)577 tokens_checked += 1578 if dots:579 value = str(self.get_tokens_range(value))580 if type(value) == int:581 value = int(value)582 elif type(value) == float:583 value = float(value)584 elif type(value) == complex:585 fmath = MathModule()586 value = fmath.complex(value)587 ast['builtin_function'].append({'argument': value})588 if inScope == False:589 self.ast['main_scope'].append(ast)590 self.token_index += tokens_checked591 return [ast, tokens_checked]592 def parse_return(self, token_stream, inScope):593 tokens_checked = 0594 value = ""595 ast = {'return': []}596 for token in range(0, len(token_stream)):597 token_type = token_stream[tokens_checked][0]598 token_value = token_stream[tokens_checked][1]599 if token_type == "SEMIC": break600 if token == 1 and token_type == "IDENTIFIER":601 value = token_value602 elif token == 1 and token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][0] == "COLON":603 value = token_value604 elif token == 1 and token_type != "IDENTIFIER":605 value = token_value606 elif token == 1 and token_type == "FACTORIAL":607 math = MathModule()608 value = str(math.factorial(int(token_value)))609 elif token > 1 and token_type == "FACTORIAL":610 math = MathModule()611 value += str(math.factorial(int(token_value)))612 elif token > 1 and token_type != "FACTORIAL":613 value += token_value614 tokens_checked += 1615 if type(value) in [int, float]:616 try:617 value = eval(value)618 except:619 pass620 elif type(value) == float:621 value = float(value)622 elif type(value) == complex:623 try:624 value = complex(value)625 except:626 pass627 ast['return'].append({'argument': value})628 if inScope == False:629 self.ast['main_scope'].append(ast)630 self.token_index += tokens_checked631 return [ast, tokens_checked]632 def parse_conditional_statements(self, token_stream, isNested):633 tokens_checked = 0634 condition = ""635 els = False636 tokens = []637 ast = 
{'conditional_statement': []}638 for token in range(0, len(token_stream)):639 token_type = token_stream[tokens_checked][0]640 token_value = token_stream[tokens_checked][1]641 if token_type == "SCOPE_DEFINIER" and token_value == "{":642 break643 elif token == 0 and token_value == "if":644 ast['conditional_statement'].append({'keyword': token_value})645 elif token == 0 and token_value == "else":646 ast['conditional_statement'].append({'keyword': token_value})647 els = True648 elif token == 1 and token_type != "FACTORIAL":649 condition = token_value650 elif token == 1 and token_type == "FACTORIAL":651 math = MathModule()652 condition = str(math.factorial(int(token_value)))653 elif token > 1 and token_type == "FACTORIAL":654 math = MathModule()655 condition += str(math.factorial(int(token_value)))656 elif token > 1 and token_type != "FACTORIAL":657 condition += token_value.replace("mod", "%")658 tokens_checked += 1659 if els == False:660 ast['conditional_statement'].append({'condition': condition})661 self.token_index += tokens_checked662 scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])663 if isNested == False:664 self.parse_scope(scope_tokens[0], ast, 'conditional_statement', False, False)665 else:666 self.parse_scope(scope_tokens[0], ast, 'conditional_statement', True, False)667 tokens_checked += scope_tokens[1]668 return [ast, tokens_checked]669 def get_token_value(self, token):670 for variable in self.symbol_table:671 if variable[1] == token: return variable[2]672 def get_token_type(self, token):673 for variable in self.symbol_table:674 if variable[1] == token: return variable[0]675 def find_token_type(self, token):676 #int677 try:678 token = int(token)679 datatype = 'int'680 except:681 pass682 def get_tokens_range(self, value):683 amount = 0684 if "..." in value:685 value = value.split('...')686 amount = 1687 elif ".." 
        # NOTE(review): fragment — the `def` line of the enclosing method falls
        # before this chunk (its visible header tail was "... in value:").  The
        # code below expands a range literal such as 1..10 or 'a'..'z' into a
        # concrete Python list.  Indentation reconstructed — TODO confirm
        # against the original repository.
        value = value.split('..')
        amount = 0
        arr = []
        try:
            # Numeric range: both bounds parse as int.
            value[0], value[1] = int(value[0]), int(value[1])
            for i in range(value[0], value[1] + amount): # startValue to endValue
                arr.append(i)
        except:
            # Character range: strip the quote characters and walk code points.
            startValue, endValue = value[0].replace("'", "").replace('"', ''), value[1].replace("'", "").replace('"', '')
            for i in range(ord(startValue), ord(endValue) + amount):
                arr.append(chr(i))
        return arr

    def get_token_match(self, start_matcher, end_matcher, token_stream):
        """Collect tokens preceding the first token whose value equals
        ``end_matcher``.

        Returns ``[tokens, count]`` where ``count`` is the number of tokens
        before the match, or ``False`` when ``end_matcher`` never occurs.
        NOTE(review): ``start_matcher`` is never used in this body.
        """
        tokens = []
        tokens_checked = 0
        for token in token_stream:
            tokens_checked += 1
            if token[1] == end_matcher:
                return [tokens, tokens_checked - 1]
            else:
                tokens.append(token)
        return False

    def parse_loop(self, token_stream, isNested):
        """Parse a ``for``/``while`` loop header plus its braced scope.

        Builds an AST node under the key ``'loop'`` and returns
        ``[ast, tokens_checked]``.  Nested scopes are delegated to
        ``self.parse_scope``.
        """
        # for x :: x < 10 :: x++ {
        tokens_checked = 0
        keyword = ""
        condition = ""
        value = ""          # NOTE(review): unused in this body
        increment = ""      # NOTE(review): unused in this body
        var_decl = False    # NOTE(review): unused in this body
        ast = {'loop': []}
        while tokens_checked < len(token_stream):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            # "{" opens the loop body — header is finished.
            if token_type == "SCOPE_DEFINIER" and token_value == "{":
                break
            if tokens_checked == 0:
                ast['loop'].append({'keyword': token_value})
                keyword = token_value
            if tokens_checked == 1 and keyword == "for":
                # Grab every header token up to the opening brace.
                tokens = self.get_token_match("::", "{", token_stream)
                inner_tokens = [i[1] for i in tokens[0]]
                if "in" in inner_tokens:
                    # for-in form: "for x in <array>".
                    array = ""
                    data_type = self.get_token_type(inner_tokens[3])
                    ast['loop'].append({'name': inner_tokens[1]})
                    ast['loop'].append({'type': data_type})
                    ast['loop'].append({'array': ''.join(inner_tokens[3:])})
                    self.symbol_table.append([data_type, inner_tokens[1], inner_tokens[3:]])
                else:
                    # C-style form: "for x :: cond :: step" — exactly two "::".
                    if len([i for i, x in enumerate(inner_tokens) if x == "::"]) != 2:
                        self.error_message("SyntaxError:\nSymbol '::' is missing in a for loop", token_stream, tokens_checked)
                    inner_tokens[:] = [x for x in inner_tokens if x != '::']
                    ast['loop'].append({'name': inner_tokens[1]})
                    ast['loop'].append({'start_value': self.get_token_value(inner_tokens[2])})
                    ast['loop'].append({'end_value': inner_tokens[4]})
                    if "++" in inner_tokens[5]:
                        ast['loop'].append({'increment': "1"})
                    elif "--" in inner_tokens[5]:
                        ast['loop'].append({'increment': "-1"})
                tokens_checked += tokens[1]
                break
            elif keyword == "while":
                # Accumulate the condition text; FACTORIAL tokens are folded
                # to their numeric value via the math library.
                if tokens_checked == 1: condition = token_value
                elif tokens_checked == 2 and token_type != "FACTORIAL":
                    condition += token_value
                elif tokens_checked == 2 and token_type == "FACTORIAL":
                    math = MathModule()
                    condition = str(math.factorial(int(token_value)))
                elif tokens_checked > 2 and token_type == "FACTORIAL":
                    math = MathModule()
                    condition += str(math.factorial(int(token_value)))
                elif tokens_checked > 2 and token_type != "FACTORIAL":
                    condition += token_value.replace("mod", "%")
            tokens_checked += 1
        self.token_index += tokens_checked
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        if keyword == "while": ast['loop'].append({'condition': condition})
        if isNested == False:
            self.parse_scope(scope_tokens[0], ast, 'loop', False, False)
        else:
            self.parse_scope(scope_tokens[0], ast, 'loop', True, False)
        tokens_checked += scope_tokens[1]
        return [ast, tokens_checked]

    def parse_func(self, token_stream, isNested):
        """Parse a function declaration header + body scope.

        Records name and comma-joined argument list in the AST and the
        symbol table; returns ``[ast, tokens_checked]``.
        """
        tokens_checked = 0
        value = ""
        ast = {'function_declaration': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{": break
            if token == 1 and token_type in ["IDENTIFIER", "INNER_FUNC"]:
                ast['function_declaration'].append({'name': token_value})
            elif token == 2 and token_type != "COLON":
                msg = "SyntaxError at line "+ str(self.lines) +":\n':' is missing"
                self.error_message(msg, token_stream, token)
            elif token == 3 and token_value == "0":
                # "0" marks an explicit empty argument list.
                value = token_value
            elif token == 3 and token_type in ["IDENTIFIER", "COMMA"]:
                value = token_value
            elif token > 3 and token_type in ["IDENTIFIER", "COMMA"]:
                value += token_value
            tokens_checked += 1
        ast['function_declaration'].append({'argument': value})
        self.token_index += tokens_checked - 1
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        if isNested == False:
            self.parse_scope(scope_tokens[0], ast, 'function_declaration', False, False)
        else:
            self.parse_scope(scope_tokens[0], ast, 'function_declaration', True, False)
        tokens_checked += scope_tokens[1]
        self.symbol_table.append(['function', ast['function_declaration'][0]['name'], ast['function_declaration'][1]['argument']])
        return [ast, tokens_checked]

    def parse_class(self, token_stream, isNested):
        """Parse a class declaration (``name : object { ... }``).

        NOTE(review): the symbol-table entry is tagged ``'function'`` — looks
        copy-pasted from parse_func; confirm intended.
        """
        tokens_checked = 0
        value = ""
        ast = {'class': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{": break
            if token == 1 and token_type == "IDENTIFIER":
                ast['class'].append({'name': token_value})
            elif token == 2 and token_type != "COLON":
                msg = f"SyntaxError at line {self.lines}\n':' is missing."
                self.error_message(msg, token_stream, token)
            elif token == 3 and token_value == "object":
                ast['class'].append({'argument': token_value})
                decl = True  # NOTE(review): never read afterwards
            tokens_checked += 1
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        self.token_index += tokens_checked - 1
        if isNested == False:
            self.parse_scope(scope_tokens[0], ast, 'class', False, False)
        else:
            self.parse_scope(scope_tokens[0], ast, 'class', True, False)
        tokens_checked += scope_tokens[1]
        self.symbol_table.append(['function', ast['class'][0]['name'], ast['class'][1]['argument']])
        return [ast, tokens_checked]

    def parse_single_line_comment(self, token_stream, inScope):
        """Consume tokens up to the next NEWLINE into a comment AST node."""
        tokens_checked = 0
        comment_str = ""
        ast = {'comment': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "NEWLINE": break
            if token >= 1:
                comment_str += str(token_value) + " "
            tokens_checked += 1
        ast['comment'].append({'Comment_str': comment_str})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def parse_multi_line_comment(self, token_stream, inScope):
        """Consume tokens up to the closing ``**|`` marker into a comment node."""
        tokens_checked = 0
        comment_str = ""
        ast = {'comment': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "COMMENT" and token_value == "**|": break
            if token >= 1:
                comment_str += str(token_value) + " "
            tokens_checked += 1
        ast['comment'].append({'Comment_str': comment_str})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def parse_match(self, token_stream, isNested):
        """Parse a match statement header and its case scope.

        NOTE(review): unlike the sibling parsers this method returns None —
        confirm callers do not expect ``[ast, tokens_checked]``.

        var stdin;
        input stdin;
        match stdin {
        1 -> echo "One";
        }
        """
        tokens_checked = 0
        ast = {'match': []}
        scope_ast = {'scope': []}  # NOTE(review): unused in this body
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{": break
            if token == 1:
                ast['match'].append({'variable': token_value})
            tokens_checked += 1
        self.token_index += tokens_checked - 1
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        if isNested == False:
            self.parse_scope(scope_tokens[0], ast, 'match', False, False, True)
        else:
            self.parse_scope(scope_tokens[0], ast, 'match', True, False, True)
        tokens_checked += scope_tokens[1]

    def parse_case(self, token_stream):
        """Parse a single ``value -> command;`` arm inside a match scope."""
        tokens_checked = 0
        value = ""
        ast = {'current_case' : []}
        while tokens_checked < len(token_stream):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            print(tokens_checked, token_type, token_value)  # NOTE(review): debug print left in
            if token_type == "SEMIC" and token_value == ";": break
            if tokens_checked == 0:
                ast['current_case'].append({'case': token_value})
            elif tokens_checked == 1 and token_type != "ARROW":
                msg = f"SyntaxError at line {self.lines}\n{token_type, token_value} !='->' symbol is missing."
                self.error_message(msg, token_stream, tokens_checked)
                break
            elif tokens_checked == 2:
                value = token_value
            elif tokens_checked > 2:
                value += f" {token_value}"
            tokens_checked += 1
        self.token_index += tokens_checked
        ast['current_case'].append({'command' : value})
        return [ast, tokens_checked]

    def parse_macros(self, token_stream):
        """Parse a ``macros { ... }`` block; the scope holds define/redefine.

        macros
        {
        define x, 10;
        redefine @echo, "print";
        }
        """
        tokens_checked = 0
        ast = {'macros': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{": break
            tokens_checked += 1
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        self.parse_scope(scope_tokens[0], ast, 'macros', False, True)

    def parse_macros_define(self, token_stream, inScope):
        """Parse one ``define name, value;`` entry and record it in the
        symbol table."""
        tokens_checked = 0
        ast = {'define': []}
        value = ""
        for token in range(len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SEMIC":break
            if token == 0:
                ast['define'].append({'function': token_value})
            elif token == 1 and token_type == "IDENTIFIER":
                ast['define'].append({'name': token_value})
            elif token == 2 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOLEAN", "COMPLEX_NUMBER"]:
                value = str(token_value)
            elif token > 2:
                value += str(token_value)
            tokens_checked += 1
        self.token_index += tokens_checked
        ast['define'].append({"value": value})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.symbol_table.append([type(ast['define'][2]['value']), ast['define'][1]['name'], ast['define'][2]['value']])
        return [ast, tokens_checked]
    # ---------------------------BROWSER------------------------------------
    # -------------------------------CALL FUNCTION------------------------------

    def call_func(self, token_stream, inScope):
        """Parse a call of a previously declared function
        (``name : arg1, arg2;``) into the main scope.

        NOTE(review): the error path calls ``self.error_message`` with one
        argument while its signature takes three — likely a latent bug.
        """
        tokens_checked = 0
        name = ""      # NOTE(review): unused in this body
        argument = ""
        ast = {'call_function': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SEMIC": break
            if token == 0:
                ast['call_function'].append({'name': token_value})
            elif token == 1 and token_type != "COLON":
                self.error_message("SyntaxError at line {}: ':' is missing".format(self.lines))
            elif token == 2:
                if token_value == "()": argument = ""
                else: argument = token_value
            elif token > 2 and token_type in ['COMMA', 'INTEGER', 'STRING', 'BOOL']:
                argument += token_value
            tokens_checked += 1
        self.token_index += tokens_checked
        ast['call_function'].append({'argument': argument})
        self.ast['main_scope'].append(ast)
        return [ast, tokens_checked]
    # --------------------------------------------------------------------------

    def error_message(self, msg, token_stream, token):
        """Print ``msg``, then the offending source line, then a caret under
        the failing token.

        NOTE(review): the loop variable shadows the ``token`` parameter, so the
        caret position uses the loop's final value — confirm this is intended.
        """
        tokens_checked = 1
        length = 0
        #This for loop will get amount of tokens in the error line
        for token in range(len(token_stream)):
            if token_stream[token][0] in ["SEMIC", "NEWLINE"]: break
            tokens_checked += 1
        print(msg)
        error_msg = " ".join(str(token[1]) for token in token_stream[:tokens_checked] if token[0] not in ["SEMIC", "NEWLINE"]) # Loops through each token in token_stream and forms a error line
        print("".join(error_msg[:-2] + ";" if error_msg[-1:] == ";" else error_msg))
        for i in range(len(token_stream)):
            if i == token: break
            else: length += len(token_stream[i][1])
        print(" " * length + "^")
        # NOTE(review): the scraped page truncates here ("...") — the method may
        # continue in the original file.

Full Screen

Full Screen

block.py

Source:block.py Github

copy

Full Screen

# Hand-written lexer + recursive-descent parser for a small C subset, with
# three-address-code emission for do-while loops.  Reconstructed from a
# whitespace-mangled page dump; indentation is inferred — TODO confirm against
# the original repository.  Several names (keyword, dataType, preDefRoutine,
# identifier, punctuator, number, spaces, prg, symbolTable, externalVariables,
# condition, assignmentStatement, E, multinit) are defined outside this chunk.
import re
#import ply.lex as lex

def loadSymbolTable():
    # Seed the symbol table with the static token-class word lists.
    symbolTable["keyword"] = keyword
    symbolTable["dataType"] = dataType
    symbolTable["preDefRoutine"] = preDefRoutine

# lb/fp: left-bound and forward pointers of the current lexeme window in `prg`.
lb = 0
fp = 1

def validLexeme(string):
    """Classify `string`; returns the token-class name or False."""
    res = False
    if(string in keyword):
        #print("key " + string + "\n")
        res = "keyword"
    elif(string in dataType):
        #print("dataType " + string + "\n")
        res = "dataType"
    elif(string in preDefRoutine):
        res = "preDefRoutine"
    elif(re.match(identifier, string)):
        #print("id " + string + "\n")
        res = "identifier"
    elif(re.match(punctuator, string)):
        #print("punc " + string)
        res = "punctuator"
    elif(re.match(number, string)):
        res = "number"
    elif(re.match(aritmeticOperator, string)):
        res = "arithmeticOperator"
    elif(re.match(assignmentOperator, string)):
        res = "assignmentOperator"
    elif(string in relationalOperator):
        res = "relationalOperator"
    elif(string in logicalOperator):
        res = "logicalOperator"
    elif(string == "#"):
        res = "hashOperator"
    elif(string == ".h"):
        res = "headerExtension"
    elif(string == "true" or string == "false"):
        res = "boolean"
    elif(string == "++"):
        res = "incrementOperator"
    elif(string == "--"):
        res = "decrementOperator"
    return res

# Operand stack + temp counter for three-address-code generation.
top = 0;
i_ = 1;
tmp = "";
li = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]

def push(val):
    # Push an operand onto the codegen stack.
    global top,li
    top = top+1
    li[top]=val;

def codegen():
    # Emit "tN = a op b" for the top three stack entries, replace them with tN.
    global tmp,i_,top,li
    tmp = "t"
    tmp+=str(i_)
    print(tmp +" = "+str(li[top-2]), str(li[top-1]), str(li[top]));
    top-=2;
    li[top]=tmp
    i_=i_+1;

def codegen_umin():
    # Emit unary minus: "tN = -a".
    global tmp,i_,top,li
    tmp = "t"
    tmp+=str(i_)
    print(tmp+" = -"+str(li[top]));
    top=top-1;
    li[top]=tmp;
    i_=i_+1;

def codegen_assign():
    # Emit "lhs = rhs" for the top two stack entries.
    global tmp,i_,top,li
    print(str(li[top-1])+" = "+str(li[top]));
    top=top-2;

label = 1

def lab1():
    # Emit a fresh loop label "Lk:".
    global label
    print("L"+str(label)+":")
    label = label+1

def lab2():
    # Emit the loop condition temp and a conditional jump back to the label.
    global tmp,i_,top,li,label
    tmp = "t"
    tmp+=str(i_)
    print(tmp+" = "+li[top-2],li[top-1],li[top]);
    print("if "+tmp+" goto L"+str(label-1));
    i_=i_+1;
    label = label-1;
    top = top-3;

def lexer():
    """Return the next token as a one-entry dict {tokenType: lexeme},
    using maximal-munch over the global program text `prg`."""
    global lb
    global fp

    lexeme = prg[lb:fp]

    # Skip leading whitespace.
    while(re.match(spaces, lexeme)):
        #print("x " + lexeme + "\n")
        lb = lb + 1
        fp = fp + 1
        lexeme = prg[lb:fp]

    #if(re.match(spaces, prg[
    #print("lexeme: " + lexeme + " type: " + str(type(lexeme)) + "\n");
    # Grow until the window becomes a valid lexeme ...
    res = validLexeme(lexeme)
    while((not res) and (fp <= len(prg))):
        #print("lexeme1: " + lexeme + "\n")
        fp = fp + 1
        lexeme = prg[lb:fp]
        res = validLexeme(lexeme)

    #print(lexeme + "\n")
    # ... then keep growing while it stays valid (maximal munch).
    tokenType = res
    res = validLexeme(lexeme)
    while((res) and (fp <= len(prg))):
        #print("lexeme2: " + lexeme + "\n")
        fp = fp + 1
        lexeme = prg[lb:fp]
        tokenType = res
        res = validLexeme(lexeme)

    lexeme = prg[lb:fp - 1]
    lb = fp - 1

    if((tokenType != False) and (tokenType not in symbolTable)):
        symbolTable[tokenType] = list()

    if((tokenType != False) and lexeme not in symbolTable[tokenType]):
        symbolTable[tokenType].append(lexeme.strip())

    #print("TOKEN: " + str(lexeme) + " TYPE: " + str(tokenType) + "\n");
    #print(str(lb) + " " + str(fp) + "\n")
    #print(str(len(prg)))
    return dict({tokenType:lexeme})

def parse_start():
    # Entry point: parse the whole program and report the outcome.
    status = program()

    print("SUCCESSFUL PARSING\n") if(status == 0) else print("FAILED PARSING\n")

def program():
    # program -> preProcessorDirective externDeclaration mainFunction
    status = preProcessorDirective()

    if(status == 0):
        status = externDeclaration()

    if(status == 0):
        status = mainFunction()

    return status

def preProcessorDirective():
    """Parse zero or more '#include <name.h>' / '#define id number' lines.
    Returns 0 on success, 1 on syntax error; a non-'#' lookahead is pushed
    back by rewinding lb/fp."""
    status = 0
    token = lexer()

    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]

    if(token_type == "hashOperator"):

        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]

        if(token_type == "keyword" and token_value == "include"):

            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]

            if(token_type == "relationalOperator" and token_value == "<"):

                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]

                if(token_type == "identifier"):

                    token = lexer()
                    token_type = list(token.keys())[0]
                    token_value = list(token.values())[0]


                    if(token_type == "headerExtension"):

                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0]

                        if(token_type == "relationalOperator" and token_value == ">"):

                            # Recurse: allow further directives.
                            status = preProcessorDirective()
                            #print(str(status) + " after return\n")

                        else:
                            print("Syntax error: expected '>' but received " + str(token_value) + "\n")
                            status = 1
                    else:
                        print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")
                        status = 1

                else:
                    print("Syntax error: expected 'Identifer' but received " + str(token_value) + "\n")
                    status = 1
            else:
                print("Syntax error: expected '<' but received " + str(token_value) + "\n")
                status = 1

        elif(token_type == "keyword" and token_value == "define"):


            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]

            if(token_type == "identifier"):

                variableName = token_value
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]

                if(token_type == "number"):

                    # Record the macro constant, then allow more directives.
                    variableValue = int(token_value.strip())
                    symbolTable[variableName] = variableValue
                    status = preProcessorDirective()


                else:
                    print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")
                    status = 1
            else:
                print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
                status = 1

        else:
            print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")
            status = 1
    else:
        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        lb = lb - len(token_value)
        fp = fp - len(token_value)

    return status
    #print("Token key: " + str((token_type) + " values: " + str(token_value) + "\n"))

def externDeclaration():
    # Optional: 'extern' declarationStatement ';'  (pushback otherwise).

    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if(token_type == "keyword" and token_value == "extern"):
        status = declarationStatement()
        if(status == 0):

            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0].strip()
            if(not (token_type == "punctuator" and token_value == ";")):
                print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")
                status = 1
    else:
        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status

def declarationStatement():
    # declarationStatement -> dataType variable
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if(token_type == 'dataType'):

        dataType = token_value.strip()
        status = variable(dataType)

    else:
        print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")
        status = 1

    return status

def optionalDeclarationStatement():
    # Like declarationStatement but returns 2 (with pushback) when the
    # lookahead is not a dataType, so callers can try other productions.
    #print("IN OPTDECL")
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    #print("before reset: " + str(token_value))
    if(token_type == 'dataType'):


        dataType = token_value.strip()
        status = variable(dataType)

    else:

        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        #print("resetting")
        global lb, fp
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
        """
        if(token_value != "do"):
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
        """
        #print("after reset: " + str(token_value))
    return status


def variable(dataType):
    # One declared identifier, recorded per-type in externalVariables,
    # then optional ', id' continuations via variableDash.
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]

    if(token_type == 'identifier'):

        #print("received identifier, " + str(token_value))
        variableName = token_value.strip()

        if(dataType not in externalVariables):
            externalVariables[dataType] = list()

        if(variableName not in externalVariables[dataType]):
            externalVariables[dataType].append(variableName)
        else:
            print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")
            status = 1
        #externalVariables.append([variableName, dataType])
        if(status==0):
            status = variableDash(dataType)
    else:
        print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
        status = 1

    return status

def variableDash(dataType):
    # Continuation of a declaration list: ", identifier" (recursive),
    # anything else is pushed back.
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]

    if(token_type == 'punctuator' and token_value == ','):

        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]

        if(token_type == 'identifier'):

            variableName = token_value.strip()
            if(dataType not in externalVariables):
                externalVariables[dataType] = list()

            if(variableName not in externalVariables[dataType]):
                externalVariables[dataType].append(variableName)
            else:
                print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")
                status = 1
            if(status==0):
                # NOTE(review): recursive result is discarded — confirm.
                variableDash(dataType)

        else:
            print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
            status = 1
    else:
        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        #print(str(lb) + " " + str(fp))
    return status


def mainFunction():
    # mainFunction -> 'int' mainDash
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]

    if(token_type == "dataType" and token_value == "int"):

        status = mainDash()

    else:
        print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")
        status = 1

    return status


def mainDash():
    # main() { statements }  or  main(int argc, char *argv[]) { statements }
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0].strip()

    #print(str(token_type) + " " + str(token_value))

    if(token_type == "identifier" and token_value == "main"):

        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0].strip()

        if(token_type == "punctuator" and token_value == "("):

            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0].strip()

            if(token_type == "punctuator" and token_value == ")"):

                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0].strip()

                if(token_type == "punctuator" and token_value == "{"):

                    status = statements()

                    if(status == 0):

                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0].strip()
                        #print(token_value + str(len(token_value)))
                        if(not(token_type == "punctuator" and token_value == "}")):
                            print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")
                            status = 1
                else:
                    print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
                    status = 1



            elif(token_type == "dataType" and token_value == "int"):

                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0].strip()

                if(token_type == "identifier" and token_value == "argc"):

                    token = lexer()
                    token_type = list(token.keys())[0].strip()
                    token_value = list(token.values())[0].strip()

                    if(token_type == "punctuator" and token_value == ","):

                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0].strip()

                        if(token_type == "dataType" and token_value == "char"):

                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()

                            if(token_type == "arithmeticOperator" and token_value == "*"):

                                token = lexer()
                                token_type = list(token.keys())[0]
                                token_value = list(token.values())[0] .strip()

                                if(token_type == "identifier" and token_value == "argv"):

                                    token = lexer()
                                    token_type = list(token.keys())[0]
                                    token_value = list(token.values())[0].strip()

                                    if(token_type == "punctuator" and token_value == "["):

                                        token = lexer()
                                        token_type = list(token.keys())[0]
                                        token_value = list(token.values())[0].strip()

                                        if(token_type == "punctuator" and token_value == "]"):

                                            token = lexer()
                                            token_type = list(token.keys())[0]
                                            token_value = list(token.values())[0].strip()

                                            if(token_type == "punctuator" and token_value == ")"):

                                                token = lexer()
                                                token_type = list(token.keys())[0]
                                                token_value = list(token.values())[0].strip()

                                                if(token_type == "punctuator" and token_value == "{"):

                                                    status = statements()

                                                    if(status == 0):

                                                        token = lexer()
                                                        token_type = list(token.keys())[0]
                                                        token_value = list(token.values())[0].strip()

                                                        if(not(token_type == "punctuator" and token_value == "}")):
                                                            print("Syntax error: expected 'Punctuator2 close curly bracket' ", end = "")
                                                            print("but received " + str(token_value) + "\n")
                                                            status = 1
                                                else:
                                                    print("Syntax error: expected 'Punctuator open curly bracket' ", end = "")
                                                    print("but received " + str(token_value) + "\n")
                                                    status = 1

                                            else:
                                                print("Syntax error: expected 'Punctuator close round bracket' but received ", end = "")
                                                print(str(token_value) + "\n")
                                                status = 1

                                        else:
                                            print("Syntax error: expected 'Punctuator close square bracket' but received ", end = "")
                                            print(str(token_value) + "\n")
                                            status = 1
                                    else:
                                        print("Syntax error: expected 'Punctuator open square bracket' but received ", end = "")
                                        print(str(token_value) + "\n")
                                        status = 1

                                else:
                                    print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")
                                    status = 1

                            else:
                                print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")
                                status = 1

                        else:
                            print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")
                            status = 1

                    else:
                        print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")
                        status = 1

                else:
                    print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")
                    status = 1


            else:
                print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")
                status = 1

        else:
            print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")
            status = 1

    else:
        print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")
        status = 1

    return status

# data[dataType][name] -> initialized value, filled by initializationStatement.
data = {}

def statements():
    """Statement list: tries initialization, optional declaration, assignment,
    do-while (emitting labels/jumps via lab1/lab2), and nested blocks, each
    followed by ';' and a recursive call for the next statement."""

    #print("top of statements\n")
    status = 0
    status = initializationStatement()

    if(status == 0):
        #print("init success")
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        #print(token_value +" new value")
        tv = token_value.strip()
        if(token_type == "punctuator" and tv == ";"):
            status = statements()
        else:
            print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")
            status = 1


    else:
        ''' token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tv = token_value.strip()'''
        #print("dc" + " " + tv)


        status = optionalDeclarationStatement()
        #print(status)
        if(status == 0):
            #print("decl success")

            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
            tv = token_value.strip()
            if(token_type == "punctuator" and tv == ";"):

                status = statements()
            else:
                print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")
                status = 1
        else:

            status = assignmentStatement()
            if(status == 0):
                #print("assgn success")

                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                tv = token_value.strip()
                if(token_type == "punctuator" and tv == ";"):
                    status = statements()
                else:
                    print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")
                    status = 1
            else:

                status = 0
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                #print("IN statements: " + token_value)
                if(token_type == "keyword" and token_value == "do"):
                    #print("Do")
                    token = lexer()
                    token_type = list(token.keys())[0]
                    token_value = list(token.values())[0].strip()
                    lab1()
                    if(token_type == "punctuator" and token_value == "{"):
                        #print("{")
                        status = statements()

                        #print("status: " + str(status))
                        if(status == 0):

                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()
                            #print(token_value)
                            if(token_type == "punctuator" and token_value == "}"):
                                #print("}")
                                token = lexer()
                                token_type = list(token.keys())[0]
                                token_value = list(token.values())[0].strip()

                                if(token_type == "keyword" and token_value == "while"):
                                    #print("while")
                                    token = lexer()
                                    token_type = list(token.keys())[0]
                                    token_value = list(token.values())[0].strip()

                                    if(token_type == "punctuator" and token_value == "("):
                                        #print("(")
                                        status = condition()
                                        lab2()
                                        if(status == 0):

                                            token = lexer()
                                            token_type = list(token.keys())[0]
                                            token_value = list(token.values())[0].strip()

                                            if(token_type == "punctuator" and token_value == ")"):
                                                #print(")")
                                                token = lexer()
                                                token_type = list(token.keys())[0]
                                                token_value = list(token.values())[0].strip()

                                                if(token_type == "punctuator" and token_value == ";"):
                                                    #print("in statements: " + token_value + "\n")
                                                    status = statements()

                                                else:
                                                    print("Syntax error: expected 'Punctuator semicolon5' ", end = "")
                                                    print("but received " + str(token_value) + "\n")
                                                    status = 1

                                            else:
                                                print("Syntax error: expected 'Punctuator close round bracket' ", end = "")
                                                print("but received " + str(token_value) + "\n")
                                                status = 1

                                    else:
                                        print("Syntax error: expected 'Punctuator open round bracket' ", end = "")
                                        print("but received " + str(token_value) + "\n")
                                        status = 1

                                else:
                                    print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")
                                    status = 1

                            else:
                                print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")
                                status = 1
                    elif(token_type == "identifier" or token_type == "datatype"):
                        global lb, fp
                        #print(token_value)
                        #print(str(lb) + " " + str(fp))
                        lb = lb - len(token_value)
                        fp = fp - len(token_value)
                        status = statement1()

                    else:
                        print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
                        status = 1


                else:
                    status = 0
                    tv = token_value.strip()
                    #print("IN statements: " + token_value)
                    if(tv == "{"):
                        status = statements()

                        #print("status: " + str(status))
                        if(status == 0):

                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()
                            #print(token_value)
                            if(token_type == "punctuator" and token_value == "}"):
                                status = statements()
                            else:
                                print("Error")
                    else:

                        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
                        #global lb, fp
                        #print(token_value)
                        #print(str(lb) + " " + str(fp))
                        lb = lb - len(token_value)
                        fp = fp - len(token_value)


    return status

def statement1():
    # Single statement inside a do-body shortcut: init or assignment, ';',
    # then the do-while tail ("while (cond);").
    status = 0
    status = initializationStatement()

    if(status == 0):
        #print("init success")
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        #print(token_value +" new value")
        tv = token_value.strip()
        if(token_type == "punctuator" and tv == ";"):
            status = 0
        else:
            status = 1
            print("Error")
    else:

        status = assignmentStatement()
        if(status == 0):
            #print("assgn success")

            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
            tv = token_value.strip()
            if(token_type == "punctuator" and tv == ";"):
                status = 0
            else:
                status = 1
                print("Error")
    if(status ==0):
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0].strip()

        if(token_type == "keyword" and token_value == "while"):
            #print("while")
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0].strip()
            if(token_type == "punctuator" and token_value == "("):
                #print("(")
                status = condition()
                lab2()
                if(status == 0):
                    token = lexer()
                    token_type = list(token.keys())[0]
                    token_value = list(token.values())[0].strip()
                    if(token_type == "punctuator" and token_value == ")"):
                        #print(")")
                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0].strip()
                        if(token_type == "punctuator" and token_value == ";"):
                            #print("in statements: " + token_value + "\n")
                            status = statements()
                        else:
                            print("Syntax error: expected 'Punctuator semicolon5' ", end = "")
                            print("but received " + str(token_value) + "\n")
                            status = 1
                    else:
                        print("Syntax error: expected 'Punctuator close round bracket' ", end = "")
                        print("but received " + str(token_value) + "\n")
                        status = 1
            else:
                print("Syntax error: expected 'Punctuator open round bracket' ", end = "")
                print("but received " + str(token_value) + "\n")
                status = 1
        else:
            print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")
            status = 1
    else:
        print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")
        status = 1
    return status

def initializationStatement():
    # dataType initStat...; returns 2 (with pushback) when the lookahead is
    # not a dataType so callers can try other productions.
    status = 0

    global lb, fp

    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if(token_type == "dataType"):
        if(token_value not in data):
            data[token_value] = {};
        #print(token_value)

        status = initStat(token_value)


    else:

        #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
    #print('returning' + str(status))
    return status


def initStat(dt):
    # Thin wrapper around multipleInitialization; the large commented block
    # below is a retained earlier implementation.
    status = multipleInitialization(dt)
    #print(status)



    '''if(status != 0 and status != 2):
        status = 0
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tk = token_value
        if(token_type == "identifier"):

            if(token_value not in data[dt]):
                data[dt][token_value]=0
            else:
                print("Syntax Error: The variable has already been initialized\n")
                return 1
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]

            if(token_type == "assignmentOperator" and token_value == "="):

                status = E(dt,tk)
                """
                print(status)
                status = 0
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                print(token_value)
                """

            elif(token_type == "punctuator" and token_value == ","):

                global lb, fp
                #print(token_value)
                #print(str(lb) + " " + str(fp))
                lb = lb - len(token_value)
                fp = fp - len(token_value)
                status = 2

            else:

                print("Syntax error: expected 'Assignment1 Operator' but received " + str(token_value) + "\n")
                status = 1 '''


    return status

def multipleInitialization(dt):
    # "id", "id = E", or "id, ..." initializer list for dataType `dt`;
    # registers defaults in `data` and emits assignments via codegen_assign.
    # NOTE(review): the char branch calls string(0) — no such builtin is
    # visible in this chunk; likely a latent NameError.  TODO confirm.
    global data
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tk = token_value
    if(token_type == "identifier"):
        push(tk)
        #print(tk)
        if(token_value not in data[dt]):
            if(dt=="int"):
                data[dt][token_value]=int(0)
            elif(dt=="char"):
                data[dt][token_value]=string(0)
            elif(dt=="float"):
                data[dt][token_value]=float(0)
            elif(dt=="double"):
                data[dt][token_value]=float(0)
            else:
                data[dt][token_value]=0
            #print(" "+token_value +":)")
        else:
            print("Syntax Error: The variable has already been initialized\n")
            return 1

        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tv = token_value.strip()
        #print(token_value+" macha")
        if(tv == ";"):
            #print("; la")
            global lb, fp
            #print(token_value)
            #print(str(lb) + " " + str(fp))
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            return 0;
        elif(token_type == "assignmentOperator" and tv == "="):

            status = E(dt,tk)
            codegen_assign()
            #print(status)

            if(status == 0):

                status = multinit(dt)
                if(status == 2):
                    status = 0
            #print(status)
        elif(token_type == "punctuator" and tv == ","):
            #print(",")
            status = multipleInitialization(dt)
            '''global lb, fp
            #print(token_value)
            #print(str(lb) + " " + str(fp))
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            status = 2 '''

        else:

            print("Syntax error: expected 'Assignment2 Operator' but received " + str(tv) + "\n")
            status = 1
    else:
        # NOTE(review): the page dump truncates inside this else branch —
        # the remainder of the function is not visible in this chunk.
        pass
print("Syntax error: expected 'Identifier' but received " + str(tv) + "\n")972 status = 1973 974 return status975 976def multinit(dt):977 status = 0978 979 token = lexer()980 token_type = list(token.keys())[0]981 token_value = list(token.values())[0]982 tv = token_value.strip()983 984 if(token_type == "punctuator" and tv == ","):985 986 #print("got comma")987 status = multipleInitialization(dt)988 989 else:990 991 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED992 global lb, fp993 #print(token_value)994 #print(str(lb) + " " + str(fp))995 lb = lb - len(token_value)996 fp = fp - len(token_value)997 status = 2998 999 return status1000def assignmentStatement():1001 global data1002 dty =''1003 status = 01004 token = lexer()1005 token_type = list(token.keys())[0]1006 token_value = list(token.values())[0]1007 tk = token_value1008 #print("asgn")1009 if(token_type == "identifier"):1010 push(tk)1011 #print(tk)1012 for i in data:1013 for j in data[i]:1014 if(j==token_value):1015 dty = i1016 if(dty==''):1017 print("The variable "+token_value+" has not been initialized.")1018 return 11019 token = lexer()1020 token_type = list(token.keys())[0]1021 token_value = list(token.values())[0]1022 1023 if(token_type == "assignmentOperator" and token_value == "="):1024 1025 status = E(dty,tk)1026 codegen_assign()1027 1028 else:1029 1030 print("Syntax error: expected 'Assignment3 Operator' but received " + str(token_value) + "\n")1031 status = 11032 else:1033 1034 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1035 global lb, fp1036 #print(token_value)1037 #print(str(lb) + " " + str(fp))1038 lb = lb - len(token_value)1039 fp = fp - len(token_value)1040 status = 21041 1042 return status1043def condition():1044 status = 01045 1046 status = C()1047 1048 return status1049def C():1050 status = 01051 token = lexer()1052 token_type = list(token.keys())[0]1053 token_value = list(token.values())[0]1054 tv = token_value.strip()1055 if(token_type == "identifier" or token_type=="number"):1056 
push(tv)1057 token = lexer()1058 token_type = list(token.keys())[0]1059 token_value = list(token.values())[0]1060 tk = token_value.strip()1061 if(token_type == "relationalOperator" or token_type == "logicalOperator"):1062 push(tk)1063 status = C() 1064 elif(token_value == ")"):1065 global lb, fp1066 #print(token_value)1067 #print(str(lb) + " " + str(fp))1068 lb = lb - len(token_value)1069 fp = fp - len(token_value)1070 return 01071 else:1072 return 11073 elif(not (token_type == "boolean")):1074 1075 print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")1076 status = 11077 return status1078op = ""1079def E(dt,vn):1080 status = F(dt,vn)1081 if(status == 0):1082 1083 status = E1(dt,vn)1084 1085 return status1086 1087def E1(dt,vn):1088 status = 01089 token = lexer()1090 token_type = list(token.keys())[0]1091 token_value = list(token.values())[0]1092 tv = token_value.strip()1093 global op;1094 if(token_type == "arithmeticOperator" and tv == "+"):1095 op = "+"1096 push(tv)1097 #print(tv)1098 status = F(dt,vn)1099 codegen()1100 if(status == 0):1101 1102 status = E1(dt,vn)1103 1104 else:1105 1106 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1107 global lb, fp1108 #print(token_value)1109 #print(str(lb) + " " + str(fp))1110 lb = lb - len(token_value)1111 fp = fp - len(token_value)1112 return status1113def F(dt,vn):1114 status = 01115 1116 status = G(dt,vn)1117 1118 if(status == 0):1119 1120 status = F1(dt,vn)1121 return status1122 1123def F1(dt,vn):1124 status = 01125 token = lexer()1126 token_type = list(token.keys())[0]1127 token_value = list(token.values())[0]1128 tv = token_value.strip()1129 global op;1130 if(token_type == "arithmeticOperator" and tv == "-"):1131 op = "-"1132 push(tv)1133 #print(tv)1134 status = G(dt,vn)1135 codegen()1136 1137 if(status == 0):1138 1139 status = F1(dt,vn)1140 1141 else:1142 1143 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1144 global lb, fp1145 #print(token_value)1146 #print(str(lb) + " " + str(fp))1147 
lb = lb - len(token_value)1148 fp = fp - len(token_value)1149 return status1150 1151def G(dt,vn):1152 status = 01153 1154 status = H(dt,vn)1155 if(status == 0):1156 1157 status = G1(dt,vn)1158 return status1159def G1(dt,vn):1160 status = 01161 1162 token = lexer()1163 token_type = list(token.keys())[0]1164 token_value = list(token.values())[0]1165 tv = token_value.strip()1166 global op;1167 if(token_type == "arithmeticOperator" and tv == "*"):1168 push(tv)1169 #print(tv)1170 op = "*"1171 status = H(dt,vn)1172 codegen()1173 if(status == 0):1174 1175 status = G1(dt,vn)1176 1177 else:1178 1179 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1180 global lb, fp1181 #print(token_value)1182 #print(str(lb) + " " + str(fp))1183 lb = lb - len(token_value)1184 fp = fp - len(token_value)1185 return status1186 1187def H(dt,vn):1188 status = 01189 1190 status = I(dt,vn)1191 1192 if(status == 0):1193 1194 status = H1(dt,vn)1195 return status1196 1197def H1(dt,vn):1198 status = 01199 1200 token = lexer()1201 token_type = list(token.keys())[0]1202 token_value = list(token.values())[0]1203 tv = token_value.strip()1204 1205 if(token_type == "arithmeticOperator" and tv == "/"):1206 global op;1207 op = "d";1208 push(tv)1209 #print(tv)1210 status = I(dt,vn)1211 codegen()1212 if(status == 0):1213 1214 status = H1(dt,vn)1215 1216 else:1217 1218 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1219 global lb, fp1220 #print(token_value + ":::" + str(len(token_value)))1221 #print(str(lb) + " " + str(fp))1222 1223 lb = lb - len(token_value)1224 fp = fp - len(token_value)1225 return status1226 1227def I(dt,vn):1228 global data1229 status = 01230 chk = 01231 token = lexer()1232 token_type = list(token.keys())[0]1233 token_value = list(token.values())[0]1234 tv = token_value.strip()1235 1236 if(token_type == "arithmeticOperator" and tv == "-"):1237 chk = 11238 push(tv)1239 #print(tv)1240 status = I()1241 codegen_umin()1242 elif(not(token_type == "identifier" or token_type == "number")):1243 
print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")1244 status = 11245 return status1246 if(token_type == "identifier" or token_type == "number"):1247 push(tv)1248 #print(tv)1249 global op;1250 g = True1251 if(token_value == "identifier"):1252 if(token_value not in data[dt]):1253 print("Syntax error: The variable "+token_value+" not in "+dt)1254 g = False1255 elif(token_value == "number"):1256 if(not isinstance(token_value,dt)):1257 print("Syntax error: The variable belongs to a different type")1258 False1259 if(op=="" and g == True):1260 if(token_type == "identifier"):1261 if(chk==1):1262 data[dt][vn]=-1*data[dt][token_value]1263 chk = 01264 else:1265 #print(token_value)1266 data[dt][vn]=data[dt][token_value]1267 1268 if(token_type == "number"):1269 if(chk==1):1270 data[dt][vn]=-1*float(token_value)1271 chk = 01272 else:1273 data[dt][vn]=float(token_value)1274 elif(op=="d" and g == True):1275 if(token_type == "identifier"):1276 if(chk==1):1277 data[dt][vn]/=-1*data[dt][token_value]1278 chk = 01279 op=""1280 else:1281 data[dt][vn]/=data[dt][token_value]1282 op=""1283 1284 if(token_type == "number"):1285 if(chk==1):1286 data[dt][vn]/=-1*float(token_value)1287 chk = 01288 op = ""1289 else:1290 data[dt][vn]/=float(token_value)1291 op = ""1292 elif(op=="*" and g == True):1293 if(token_type == "identifier"):1294 if(chk==1):1295 data[dt][vn]*=-1*data[dt][token_value]1296 chk = 01297 op=""1298 else:1299 data[dt][vn]*=data[dt][token_value]1300 op=""1301 1302 if(token_type == "number"):1303 if(chk==1):1304 data[dt][vn]*=-1*float(token_value)1305 chk = 01306 op = ""1307 else:1308 data[dt][vn]*=float(token_value)1309 op = ""1310 elif(op=="-" and g == True):1311 if(token_type == "identifier"):1312 if(chk==1):1313 data[dt][vn]-=-1*data[dt][token_value]1314 chk = 01315 op=""1316 else:1317 data[dt][vn]-=data[dt][token_value]1318 op=""1319 1320 if(token_type == "number"):1321 if(chk==1):1322 data[dt][vn]-=-1*float(token_value)1323 chk = 
01324 op = ""1325 else:1326 data[dt][vn]-=float(token_value)1327 op = ""1328 elif(op=="+" and g == True):1329 if(token_type == "identifier"):1330 if(chk==1):1331 data[dt][vn]+=-1*data[dt][token_value]1332 chk = 01333 op=""1334 else:1335 data[dt][vn]+=data[dt][token_value]1336 op=""1337 1338 if(token_type == "number"):1339 if(chk==1):1340 data[dt][vn]+=-1*float(token_value)1341 chk = 01342 op = ""1343 else:1344 data[dt][vn]+=float(token_value)1345 op = ""1346 return status1347 1348 1349 1350prg = open("nocomments.c").read()1351symbolTable = dict()1352externalVariables = dict()1353localVariables = list()1354keyword = ["include", "define", "while", "do", "for", "return", "extern"]1355dataType = ["void", "int", "short", "long", "char", "float", "double"]1356preDefRoutine = ["printf", "scanf"]1357#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]1358identifier = "^[^\d\W]\w*\Z"1359punctuator = "^[()[\]{};.,]$"1360aritmeticOperator = "^[-+*/]$"1361assignmentOperator = "^=$"1362relationalOperator = ["<", ">", "<=", ">=", "==", "!="]1363logicalOperator = ["&&", "||", "!"]1364number = "^\d+$"1365spaces = "[' ''\n''\t']"1366loadSymbolTable()1367parse_start()1368'''1369for i in data:1370 for j in data[i]:1371 print(i+" "+j+" "+str(data[i][j]))1372'''1373"""1374while lb!=len(prg):1375 lexer()1376"""1377#print(symbolTable)1378#print(externalVariables)1379"""1380PARSER ERROR CODES:13810-SUCCESS13821-FAILURE1383"""1384 ...

— second snippet —

syn.py — source: syn.py (GitHub)

import re


def loadSymbolTable():
    """Seed the symbol table with the reserved word classes."""
    symbolTable["keyword"] = keyword
    symbolTable["dataType"] = dataType
    symbolTable["preDefRoutine"] = preDefRoutine


# Sliding lexer window over the program text: prg[lb:fp] is the candidate lexeme.
lb = 0
fp = 1


def validLexeme(string):
    """Classify *string* against the token tables; return the token class
    name, or False when the string is not (yet) a valid lexeme."""
    res = False
    if string in keyword:
        res = "keyword"
    elif string in dataType:
        res = "dataType"
    elif string in preDefRoutine:
        res = "preDefRoutine"
    elif re.match(identifier, string):
        res = "identifier"
    elif re.match(punctuator, string):
        res = "punctuator"
    elif re.match(number, string):
        res = "number"
    elif re.match(aritmeticOperator, string):
        res = "arithmeticOperator"
    elif re.match(assignmentOperator, string):
        res = "assignmentOperator"
    elif string in relationalOperator:
        res = "relationalOperator"
    elif string in logicalOperator:
        res = "logicalOperator"
    elif string == "#":
        res = "hashOperator"
    elif string == ".h":
        res = "headerExtension"
    elif string == "true" or string == "false":
        res = "boolean"
    elif string == "++":
        res = "incrementOperator"
    elif string == "--":
        res = "decrementOperator"
    return res


# Parse stack and code-generation state.
top = 0
i_ = 1
tmp = ""
do = 0
li = [0] * 34


def push(val):
    """Push *val* onto the parse stack."""
    global top, li
    top = top + 1
    li[top] = val


def codegen():
    """Emit a binary-operation node and fold the top three stack slots."""
    global tmp, i_, top, li
    tmp = "N" + str(i_)
    print("NODE " + tmp + " -> " + str(li[top - 2]) + " <-- " + str(li[top - 1]) + " --> " + str(li[top]))
    top -= 2
    li[top] = tmp
    i_ = i_ + 1


def codegen_umin():
    """Emit a unary-minus temporary for the stack top."""
    global tmp, i_, top, li
    tmp = "t"
    tmp += str(i_)
    print(tmp + " = -" + str(li[top]))
    top = top - 1
    li[top] = tmp
    i_ = i_ + 1


def codegen_assign():
    """Emit an assignment edge and pop both operands."""
    global tmp, i_, top, li
    print(str(li[top - 1]) + " <-- = --> " + str(li[top]))
    if do != 0:
        print("do --> =")
    else:
        print("main --> =")
    top = top - 2


label = 1


def lab1():
    global label
    label = label + 1


wh = ["dd"]


def lab2():
    """Emit the condition node of a while and remember its name in wh[0]."""
    global tmp, i_, top, li, label, wh
    tmp = "N" + str(i_)
    print("NODE " + tmp + " -> " + str(li[top - 2]) + " <-- " + str(li[top - 1]) + " --> " + str(li[top]))
    i_ = i_ + 1
    wh[0] = tmp
    label = label - 1
    top = top - 3


def lexer():
    """Scan the next token from *prg* using maximal munch.

    Returns a one-entry dict {tokenType: lexeme}; also records the lexeme
    in the symbol table bucket for its type.
    """
    global lb, fp
    lexeme = prg[lb:fp]
    # Skip leading whitespace.
    while re.match(spaces, lexeme):
        lb = lb + 1
        fp = fp + 1
        lexeme = prg[lb:fp]
    # Grow the window until it becomes a valid lexeme...
    res = validLexeme(lexeme)
    while (not res) and (fp <= len(prg)):
        fp = fp + 1
        lexeme = prg[lb:fp]
        res = validLexeme(lexeme)
    # ...then keep growing while it stays valid (maximal munch).
    tokenType = res
    res = validLexeme(lexeme)
    while res and (fp <= len(prg)):
        fp = fp + 1
        lexeme = prg[lb:fp]
        tokenType = res
        res = validLexeme(lexeme)
    lexeme = prg[lb:fp - 1]
    lb = fp - 1
    if (tokenType != False) and (tokenType not in symbolTable):
        symbolTable[tokenType] = list()
    if (tokenType != False) and lexeme not in symbolTable[tokenType]:
        symbolTable[tokenType].append(lexeme.strip())
    return dict({tokenType: lexeme})


def parse_start():
    status = program()
    print("SUCCESSFUL PARSING\n") if (status == 0) else print("FAILED PARSING\n")


def program():
    """file := preprocessor-directives extern-declarations main-function"""
    status = preProcessorDirective()
    if status == 0:
        status = externDeclaration()
    if status == 0:
        status = mainFunction()
    return status


def preProcessorDirective():
    """Parse '#include <id.h>' and '#define id number' directives.

    Recurses to consume any number of directives; rewinds when the
    lookahead is not '#'.
    """
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == "hashOperator":
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        if token_type == "keyword" and token_value == "include":
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
            if token_type == "relationalOperator" and token_value == "<":
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                if token_type == "identifier":
                    token = lexer()
                    token_type = list(token.keys())[0]
                    token_value = list(token.values())[0]
                    if token_type == "headerExtension":
                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0]
                        if token_type == "relationalOperator" and token_value == ">":
                            status = preProcessorDirective()
                        else:
                            print("Syntax error: expected '>' but received " + str(token_value) + "\n")
                            status = 1
                    else:
                        print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")
                        status = 1
                else:
                    print("Syntax error: expected 'Identifer' but received " + str(token_value) + "\n")
                    status = 1
            else:
                print("Syntax error: expected '<' but received " + str(token_value) + "\n")
                status = 1
        elif token_type == "keyword" and token_value == "define":
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
            if token_type == "identifier":
                variableName = token_value
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                if token_type == "number":
                    variableValue = int(token_value.strip())
                    symbolTable[variableName] = variableValue
                    status = preProcessorDirective()
                else:
                    print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")
                    status = 1
            else:
                print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
                status = 1
        else:
            print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")
            status = 1
    else:
        # Not a directive: rewind the lexer window.
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def externDeclaration():
    """Parse an optional 'extern <declaration> ;' block (rewinds if absent)."""
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == "keyword" and token_value == "extern":
        status = declarationStatement()
        if status == 0:
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0].strip()
            if not (token_type == "punctuator" and token_value == ";"):
                print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")
                status = 1
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def declarationStatement():
    """Parse '<dataType> id [, id]*'."""
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == 'dataType':
        dataType = token_value.strip()
        status = variable(dataType)
    else:
        print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")
        status = 1
    return status


def optionalDeclarationStatement():
    """Like declarationStatement(), but rewinds (status 2) on no match."""
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == 'dataType':
        dataType = token_value.strip()
        status = variable(dataType)
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
    return status


def variable(dataType):
    """Parse the first identifier of an extern declaration list and record
    it under *dataType* in externalVariables."""
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == 'identifier':
        variableName = token_value.strip()
        if dataType not in externalVariables:
            externalVariables[dataType] = list()
        if variableName not in externalVariables[dataType]:
            externalVariables[dataType].append(variableName)
        else:
            print("Syntax error: The variable " + str(token_value) + " of type " + token_type + " has already been initiliazed.\n")
            status = 1
        if status == 0:
            status = variableDash(dataType)
    else:
        print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
        status = 1
    return status


def variableDash(dataType):
    """Parse ', id' continuations of an extern declaration (rewinds when
    the lookahead is not a comma)."""
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == 'punctuator' and token_value == ',':
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        if token_type == 'identifier':
            variableName = token_value.strip()
            if dataType not in externalVariables:
                externalVariables[dataType] = list()
            if variableName not in externalVariables[dataType]:
                externalVariables[dataType].append(variableName)
            else:
                print("Syntax error: The variable " + str(token_value) + " of type " + token_type + " has already been initiliazed.\n")
                status = 1
            if status == 0:
                # FIX: the original discarded the recursive status, hiding
                # errors in later variables of the list.
                status = variableDash(dataType)
        else:
            print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
            status = 1
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def mainFunction():
    """Parse 'int main...' — the return type, then hand off to mainDash()."""
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == "dataType" and token_value == "int":
        status = mainDash()
    else:
        print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")
        status = 1
    return status


def mainDash():
    """Parse the rest of 'main(...) { statements }'.

    Accepts either an empty parameter list or the canonical
    '(int argc, char *argv[])' form.
    """
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0].strip()
    if token_type == "identifier" and token_value == "main":
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0].strip()
        if token_type == "punctuator" and token_value == "(":
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0].strip()
            if token_type == "punctuator" and token_value == ")":
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0].strip()
                if token_type == "punctuator" and token_value == "{":
                    status = statements()
                    if status == 0:
                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0].strip()
                        if not (token_type == "punctuator" and token_value == "}"):
                            print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")
                            status = 1
                else:
                    print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
                    status = 1
            elif token_type == "dataType" and token_value == "int":
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0].strip()
                if token_type == "identifier" and token_value == "argc":
                    token = lexer()
                    token_type = list(token.keys())[0].strip()
                    token_value = list(token.values())[0].strip()
                    if token_type == "punctuator" and token_value == ",":
                        token = lexer()
                        token_type = list(token.keys())[0]
                        token_value = list(token.values())[0].strip()
                        if token_type == "dataType" and token_value == "char":
                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()
                            if token_type == "arithmeticOperator" and token_value == "*":
                                token = lexer()
                                token_type = list(token.keys())[0]
                                token_value = list(token.values())[0].strip()
                                if token_type == "identifier" and token_value == "argv":
                                    token = lexer()
                                    token_type = list(token.keys())[0]
                                    token_value = list(token.values())[0].strip()
                                    if token_type == "punctuator" and token_value == "[":
                                        token = lexer()
                                        token_type = list(token.keys())[0]
                                        token_value = list(token.values())[0].strip()
                                        if token_type == "punctuator" and token_value == "]":
                                            token = lexer()
                                            token_type = list(token.keys())[0]
                                            token_value = list(token.values())[0].strip()
                                            if token_type == "punctuator" and token_value == ")":
                                                token = lexer()
                                                token_type = list(token.keys())[0]
                                                token_value = list(token.values())[0].strip()
                                                if token_type == "punctuator" and token_value == "{":
                                                    status = statements()
                                                    if status == 0:
                                                        token = lexer()
                                                        token_type = list(token.keys())[0]
                                                        token_value = list(token.values())[0].strip()
                                                        if not (token_type == "punctuator" and token_value == "}"):
                                                            print("Syntax error: expected 'Punctuator2 close curly bracket' ", end="")
                                                            print("but received " + str(token_value) + "\n")
                                                            status = 1
                                                else:
                                                    print("Syntax error: expected 'Punctuator open curly bracket' ", end="")
                                                    print("but received " + str(token_value) + "\n")
                                                    status = 1
                                            else:
                                                print("Syntax error: expected 'Punctuator close round bracket' but received ", end="")
                                                print(str(token_value) + "\n")
                                                status = 1
                                        else:
                                            print("Syntax error: expected 'Punctuator close square bracket' but received ", end="")
                                            print(str(token_value) + "\n")
                                            status = 1
                                    else:
                                        print("Syntax error: expected 'Punctuator open square bracket' but received ", end="")
                                        print(str(token_value) + "\n")
                                        status = 1
                                else:
                                    print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")
                                    status = 1
                            else:
                                print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")
                                status = 1
                        else:
                            print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")
                            status = 1
                    else:
                        print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")
                        status = 1
                else:
                    print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")
                    status = 1
            else:
                print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")
                status = 1
        else:
            print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")
            status = 1
    else:
        print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")
        status = 1
    return status


# Symbol/value table: data[dataType][variableName] -> current value.
data = {}


def statements():
    """Parse the statement list inside main: initializations, declarations,
    assignments, do-while loops and bare '{ }' blocks."""
    global do, lb, fp
    status = initializationStatement()
    if status == 0:
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tv = token_value.strip()
        if token_type == "punctuator" and tv == ";":
            status = statements()
        else:
            print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")
            status = 1
    else:
        status = optionalDeclarationStatement()
        if status == 0:
            token = lexer()
            token_type = list(token.keys())[0]
            token_value = list(token.values())[0]
            tv = token_value.strip()
            if token_type == "punctuator" and tv == ";":
                status = statements()
            else:
                print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")
                status = 1
        else:
            status = assignmentStatement()
            if status == 0:
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                tv = token_value.strip()
                if token_type == "punctuator" and tv == ";":
                    status = statements()
                else:
                    print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")
                    status = 1
            else:
                status = 0
                token = lexer()
                token_type = list(token.keys())[0]
                token_value = list(token.values())[0]
                if token_type == "keyword" and token_value == "do":
                    do = do + 1
                    token = lexer()
                    token_type = list(token.keys())[0]
                    token_value = list(token.values())[0].strip()
                    lab1()
                    if token_type == "punctuator" and token_value == "{":
                        status = statements()
                        if status == 0:
                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()
                            if token_type == "punctuator" and token_value == "}":
                                token = lexer()
                                token_type = list(token.keys())[0]
                                token_value = list(token.values())[0].strip()
                                do = do - 1
                                if do == 0:
                                    print("main --> do")
                                else:
                                    print("do --> do")
                                if token_type == "keyword" and token_value == "while":
                                    token = lexer()
                                    token_type = list(token.keys())[0]
                                    token_value = list(token.values())[0].strip()
                                    if token_type == "punctuator" and token_value == "(":
                                        status = condition()
                                        lab2()
                                        if status == 0:
                                            token = lexer()
                                            token_type = list(token.keys())[0]
                                            token_value = list(token.values())[0].strip()
                                            if token_type == "punctuator" and token_value == ")":
                                                print("while --> " + wh[0])
                                                token = lexer()
                                                token_type = list(token.keys())[0]
                                                token_value = list(token.values())[0].strip()
                                                if token_type == "punctuator" and token_value == ";":
                                                    status = statements()
                                                    if do == 0:
                                                        print("main --> while")
                                                    else:
                                                        print("do --> while")
                                                else:
                                                    print("Syntax error: expected 'Punctuator semicolon5' ", end="")
                                                    print("but received " + str(token_value) + "\n")
                                                    status = 1
                                            else:
                                                print("Syntax error: expected 'Punctuator close round bracket' ", end="")
                                                print("but received " + str(token_value) + "\n")
                                                status = 1
                                    else:
                                        print("Syntax error: expected 'Punctuator open round bracket' ", end="")
                                        print("but received " + str(token_value) + "\n")
                                        status = 1
                                else:
                                    print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")
                                    status = 1
                            else:
                                print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")
                                status = 1
                    else:
                        print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
                        status = 1
                else:
                    status = 0
                    tv = token_value.strip()
                    if tv == "{":
                        status = statements()
                        if status == 0:
                            token = lexer()
                            token_type = list(token.keys())[0]
                            token_value = list(token.values())[0].strip()
                            if token_type == "punctuator" and token_value == "}":
                                status = statements()
                            else:
                                print("Error")
                    else:
                        # Not a statement: rewind for the caller.
                        lb = lb - len(token_value)
                        fp = fp - len(token_value)
    return status


def initializationStatement():
    """Parse '<dataType> <init-list>'; 2 means no match (rewound)."""
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    if token_type == "dataType":
        if token_value not in data:
            data[token_value] = {}
        status = initStat(token_value)
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
    return status


def initStat(dt):
    """Parse the initializer list for data type *dt*."""
    # (A large commented-out duplicate of multipleInitialization() was
    # removed here; it was dead code.)
    return multipleInitialization(dt)


def multipleInitialization(dt):
    """Parse 'id [= expr] [, ...]'; new names get a 0 default in *data*."""
    global data, lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tk = token_value
    tv = token_value.strip()
    if token_type == "identifier":
        push(tk)
        if token_value not in data[dt]:
            data[dt][token_value] = 0
        else:
            print("Syntax Error: The variable has already been initialized\n")
            return 1
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tv = token_value.strip()
        if tv == ";":
            # Rewind: the caller consumes the semicolon.
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            return 0
        elif token_type == "assignmentOperator" and tv == "=":
            status = E(dt, tk)
            codegen_assign()
            if status == 0:
                status = multinit(dt)
                if status == 2:
                    status = 0
        elif token_type == "punctuator" and tv == ",":
            status = multipleInitialization(dt)
        else:
            print("Syntax error: expected 'Assignment2 Operator' but received " + str(tv) + "\n")
            status = 1
    else:
        # FIX: tv was unbound on this path in the original (NameError).
        print("Syntax error: expected 'Identifier' but received " + str(tv) + "\n")
        status = 1
    return status


def multinit(dt):
    """Parse a ', <init-list>' continuation; 2 means 'no comma' (rewound)."""
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    if token_type == "punctuator" and tv == ",":
        status = multipleInitialization(dt)
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
    return status


def assignmentStatement():
    """Parse 'id = expr' for an already-declared identifier; 2 = rewound."""
    global data, lb, fp
    dty = ''
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tk = token_value
    if token_type == "identifier":
        push(tk)
        # Recover the declared type of the identifier from the data table.
        for i in data:
            for j in data[i]:
                if j == token_value:
                    dty = i
        if dty == '':
            print("The variable " + token_value + " has not been initialized.")
            return 1
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        if token_type == "assignmentOperator" and token_value == "=":
            status = E(dty, tk)
            codegen_assign()
        else:
            print("Syntax error: expected 'Assignment3 Operator' but received " + str(token_value) + "\n")
            status = 1
    else:
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2
    return status


def condition():
    """Parse a (possibly chained) relational/logical condition."""
    return C()


def C():
    """Condition worker: operand [rel/log-op C] | boolean.

    NOTE(review): the scraped source was cut off inside this function; the
    tail is restored from the byte-identical sibling implementation of C()
    in the companion parser file on the same page.
    """
    global lb, fp
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    if token_type == "identifier" or token_type == "number":
        push(tv)
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tk = token_value.strip()
        if token_type == "relationalOperator" or token_type == "logicalOperator":
            push(tk)
            status = C()
        elif token_value == ")":
            # End of condition: rewind so the caller sees the ')'.
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            return 0
        else:
            return 1
    elif not (token_type == "boolean"):
        print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")
        status = 1
    return status
len(token_value)998 fp = fp - len(token_value)999 return 01000 else:1001 return 11002 elif(not (token_type == "boolean")):1003 1004 print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")1005 status = 11006 return status1007op = ""1008def E(dt,vn):1009 status = F(dt,vn)1010 if(status == 0):1011 1012 status = E1(dt,vn)1013 1014 return status1015 1016def E1(dt,vn):1017 status = 01018 token = lexer()1019 token_type = list(token.keys())[0]1020 token_value = list(token.values())[0]1021 tv = token_value.strip()1022 global op;1023 if(token_type == "arithmeticOperator" and tv == "+"):1024 op = "+"1025 push(tv)1026 #print(tv)1027 status = F(dt,vn)1028 codegen()1029 if(status == 0):1030 1031 status = E1(dt,vn)1032 1033 else:1034 1035 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1036 global lb, fp1037 #print(token_value)1038 #print(str(lb) + " " + str(fp))1039 lb = lb - len(token_value)1040 fp = fp - len(token_value)1041 return status1042def F(dt,vn):1043 status = 01044 1045 status = G(dt,vn)1046 1047 if(status == 0):1048 1049 status = F1(dt,vn)1050 return status1051 1052def F1(dt,vn):1053 status = 01054 token = lexer()1055 token_type = list(token.keys())[0]1056 token_value = list(token.values())[0]1057 tv = token_value.strip()1058 global op;1059 if(token_type == "arithmeticOperator" and tv == "-"):1060 op = "-"1061 push(tv)1062 #print(tv)1063 status = G(dt,vn)1064 codegen()1065 1066 if(status == 0):1067 1068 status = F1(dt,vn)1069 1070 else:1071 1072 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1073 global lb, fp1074 #print(token_value)1075 #print(str(lb) + " " + str(fp))1076 lb = lb - len(token_value)1077 fp = fp - len(token_value)1078 return status1079 1080def G(dt,vn):1081 status = 01082 1083 status = H(dt,vn)1084 if(status == 0):1085 1086 status = G1(dt,vn)1087 return status1088def G1(dt,vn):1089 status = 01090 1091 token = lexer()1092 token_type = list(token.keys())[0]1093 token_value = list(token.values())[0]1094 tv = 
token_value.strip()1095 global op;1096 if(token_type == "arithmeticOperator" and tv == "*"):1097 push(tv)1098 #print(tv)1099 op = "*"1100 status = H(dt,vn)1101 codegen()1102 if(status == 0):1103 1104 status = G1(dt,vn)1105 1106 else:1107 1108 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1109 global lb, fp1110 #print(token_value)1111 #print(str(lb) + " " + str(fp))1112 lb = lb - len(token_value)1113 fp = fp - len(token_value)1114 return status1115 1116def H(dt,vn):1117 status = 01118 1119 status = I(dt,vn)1120 1121 if(status == 0):1122 1123 status = H1(dt,vn)1124 return status1125 1126def H1(dt,vn):1127 status = 01128 1129 token = lexer()1130 token_type = list(token.keys())[0]1131 token_value = list(token.values())[0]1132 tv = token_value.strip()1133 1134 if(token_type == "arithmeticOperator" and tv == "/"):1135 global op;1136 op = "d";1137 push(tv)1138 #print(tv)1139 status = I(dt,vn)1140 codegen()1141 if(status == 0):1142 1143 status = H1(dt,vn)1144 1145 else:1146 1147 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1148 global lb, fp1149 #print(token_value + ":::" + str(len(token_value)))1150 #print(str(lb) + " " + str(fp))1151 1152 lb = lb - len(token_value)1153 fp = fp - len(token_value)1154 return status1155 1156def I(dt,vn):1157 global data1158 status = 01159 chk = 01160 token = lexer()1161 token_type = list(token.keys())[0]1162 token_value = list(token.values())[0]1163 tv = token_value.strip()1164 1165 if(token_type == "arithmeticOperator" and tv == "-"):1166 chk = 11167 push(tv)1168 #print(tv)1169 status = I()1170 codegen_umin()1171 elif(not(token_type == "identifier" or token_type == "number")):1172 print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")1173 status = 11174 return status1175 if(token_type == "identifier" or token_type == "number"):1176 push(tv)1177 #print(tv)1178 global op;1179 if(op==""):1180 if(token_type == "identifier"):1181 if(chk==1):1182 data[dt][vn]=-1*data[dt][token_value]1183 chk = 01184 
else:1185 #print(token_value)1186 data[dt][vn]=data[dt][token_value]1187 1188 if(token_type == "number"):1189 if(chk==1):1190 data[dt][vn]=-1*float(token_value)1191 chk = 01192 else:1193 data[dt][vn]=float(token_value)1194 elif(op=="d"):1195 if(token_type == "identifier"):1196 if(chk==1):1197 data[dt][vn]/=-1*data[dt][token_value]1198 chk = 01199 op=""1200 else:1201 data[dt][vn]/=data[dt][token_value]1202 op=""1203 1204 if(token_type == "number"):1205 if(chk==1):1206 data[dt][vn]/=-1*float(token_value)1207 chk = 01208 op = ""1209 else:1210 data[dt][vn]/=float(token_value)1211 op = ""1212 elif(op=="*"):1213 if(token_type == "identifier"):1214 if(chk==1):1215 data[dt][vn]*=-1*data[dt][token_value]1216 chk = 01217 op=""1218 else:1219 data[dt][vn]*=data[dt][token_value]1220 op=""1221 1222 if(token_type == "number"):1223 if(chk==1):1224 data[dt][vn]*=-1*float(token_value)1225 chk = 01226 op = ""1227 else:1228 data[dt][vn]*=float(token_value)1229 op = ""1230 elif(op=="-"):1231 if(token_type == "identifier"):1232 if(chk==1):1233 data[dt][vn]-=-1*data[dt][token_value]1234 chk = 01235 op=""1236 else:1237 data[dt][vn]-=data[dt][token_value]1238 op=""1239 1240 if(token_type == "number"):1241 if(chk==1):1242 data[dt][vn]-=-1*float(token_value)1243 chk = 01244 op = ""1245 else:1246 data[dt][vn]-=float(token_value)1247 op = ""1248 elif(op=="+"):1249 if(token_type == "identifier"):1250 if(chk==1):1251 data[dt][vn]+=-1*data[dt][token_value]1252 chk = 01253 op=""1254 else:1255 data[dt][vn]+=data[dt][token_value]1256 op=""1257 1258 if(token_type == "number"):1259 if(chk==1):1260 data[dt][vn]+=-1*float(token_value)1261 chk = 01262 op = ""1263 else:1264 data[dt][vn]+=float(token_value)1265 op = ""1266 return status1267 1268 1269 1270prg = open("nocomments.c").read()1271symbolTable = dict()1272externalVariables = dict()1273localVariables = list()1274keyword = ["include", "define", "while", "do", "for", "return", "extern"]1275dataType = ["void", "int", "short", "long", "char", "float", 
"double"]1276preDefRoutine = ["printf", "scanf"]1277#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]1278identifier = "^[^\d\W]\w*\Z"1279punctuator = "^[()[\]{};.,]$"1280aritmeticOperator = "^[-+*/]$"1281assignmentOperator = "^=$"1282relationalOperator = ["<", ">", "<=", ">=", "==", "!="]1283logicalOperator = ["&&", "||", "!"]1284number = "^\d+$"1285spaces = "[' ''\n''\t']"1286loadSymbolTable()1287parse_start()1288'''1289for i in data:1290 for j in data[i]:1291 print(i+" "+j+" "+str(data[i][j]))1292'''1293"""1294while lb!=len(prg):1295 lexer()1296"""1297#print(symbolTable)1298#print(externalVariables)1299"""1300PARSER ERROR CODES:13010-SUCCESS13021-FAILURE1303"""1304 ...

Full Screen

Full Screen

lex.py

Source:lex.py Github

copy

Full Screen

1import re2#import ply.lex as lex3def loadSymbolTable():4 5 symbolTable["keyword"] = keyword6 symbolTable["dataType"] = dataType7 symbolTable["preDefRoutine"] = preDefRoutine8lb = 09fp = 110def validLexeme(string):11 12 res = False13 if(string in keyword):14 #print("key " + string + "\n")15 res = "keyword"16 elif(string in dataType):17 #print("dataType " + string + "\n")18 res = "dataType"19 elif(string in preDefRoutine):20 res = "preDefRoutine"21 elif(re.match(identifier, string)):22 #print("id " + string + "\n")23 res = "identifier"24 elif(re.match(punctuator, string)):25 #print("punc " + string)26 res = "punctuator"27 elif(re.match(number, string)):28 res = "number"29 elif(re.match(aritmeticOperator, string)):30 res = "arithmeticOperator"31 elif(re.match(assignmentOperator, string)):32 res = "assignmentOperator"33 elif(string in relationalOperator):34 res = "relationalOperator"35 elif(string in logicalOperator):36 res = "logicalOperator"37 elif(string == "#"):38 res = "hashOperator"39 elif(string == ".h"):40 res = "headerExtension"41 elif(string == "true" or string == "false"):42 res = "boolean"43 elif(string == "++"):44 res = "incrementOperator"45 elif(string == "--"):46 res = "decrementOperator"47 return res48top = 0;49i_ = 1;50tmp = "";51li = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]52def push(val):53 global top,li54 top = top+155 li[top]=val;56 57def codegen():58 global tmp,i_,top,li59 tmp = "t"60 tmp+=str(i_)61 print(tmp +" = "+str(li[top-2]), str(li[top-1]), str(li[top]));62 top-=2;63 li[top]=tmp64 i_=i_+1;65def codegen_umin():66 global tmp,i_,top,li67 tmp = "t"68 tmp+=str(i_)69 print(tmp+" = -"+str(li[top]));70 top=top-1;71 li[top]=tmp;72 i_=i_+1;73def codegen_assign():74 global tmp,i_,top,li75 print(str(li[top-1])+" = "+str(li[top]));76 top=top-2;77label = 178def lab1():79 global label80 print("L"+str(label)+":")81 label = label+182 83def lab2():84 global tmp,i_,top,li,label85 tmp = "t"86 tmp+=str(i_)87 print(tmp+" = 
"+li[top-2],li[top-1],li[top]);88 print("if "+tmp+" goto L"+str(label-1));89 i_=i_+1;90 label = label-1;91 top = top-3;92def lexer():93 global lb94 global fp95 96 lexeme = prg[lb:fp]97 98 while(re.match(spaces, lexeme)):99 #print("x " + lexeme + "\n")100 lb = lb + 1101 fp = fp + 1102 lexeme = prg[lb:fp]103 104 #if(re.match(spaces, prg[105 #print("lexeme: " + lexeme + " type: " + str(type(lexeme)) + "\n");106 res = validLexeme(lexeme)107 while((not res) and (fp <= len(prg))):108 #print("lexeme1: " + lexeme + "\n")109 fp = fp + 1110 lexeme = prg[lb:fp]111 res = validLexeme(lexeme)112 113 #print(lexeme + "\n")114 tokenType = res115 res = validLexeme(lexeme)116 while((res) and (fp <= len(prg))):117 #print("lexeme2: " + lexeme + "\n")118 fp = fp + 1119 lexeme = prg[lb:fp]120 tokenType = res121 res = validLexeme(lexeme)122 123 lexeme = prg[lb:fp - 1]124 lb = fp - 1125 126 if((tokenType != False) and (tokenType not in symbolTable)):127 symbolTable[tokenType] = list()128 129 if((tokenType != False) and lexeme not in symbolTable[tokenType]):130 symbolTable[tokenType].append(lexeme.strip())131 132 #print("TOKEN: " + str(lexeme) + " TYPE: " + str(tokenType) + "\n");133 #print(str(lb) + " " + str(fp) + "\n")134 #print(str(len(prg)))135 return dict({tokenType:lexeme})136def parse_start():137 status = program()138 139 print("SUCCESSFUL PARSING\n") if(status == 0) else print("FAILED PARSING\n")140 141def program():142 status = preProcessorDirective()143 144 if(status == 0):145 status = externDeclaration()146 147 if(status == 0):148 status = mainFunction()149 150 return status151def preProcessorDirective():152 status = 0153 token = lexer()154 155 token_type = list(token.keys())[0]156 token_value = list(token.values())[0]157 158 if(token_type == "hashOperator"):159 160 token = lexer()161 token_type = list(token.keys())[0]162 token_value = list(token.values())[0]163 164 if(token_type == "keyword" and token_value == "include"):165 166 token = lexer()167 token_type = 
list(token.keys())[0]168 token_value = list(token.values())[0]169 170 if(token_type == "relationalOperator" and token_value == "<"):171 172 token = lexer()173 token_type = list(token.keys())[0]174 token_value = list(token.values())[0]175 176 if(token_type == "identifier"):177 178 token = lexer()179 token_type = list(token.keys())[0]180 token_value = list(token.values())[0]181 182 183 if(token_type == "headerExtension"):184 185 token = lexer()186 token_type = list(token.keys())[0]187 token_value = list(token.values())[0] 188 189 if(token_type == "relationalOperator" and token_value == ">"):190 191 status = preProcessorDirective()192 #print(str(status) + " after return\n")193 194 else:195 print("Syntax error: expected '>' but received " + str(token_value) + "\n")196 status = 1197 else:198 print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")199 status = 1200 201 else:202 print("Syntax error: expected 'Identifer' but received " + str(token_value) + "\n")203 status = 1204 else: 205 print("Syntax error: expected '<' but received " + str(token_value) + "\n")206 status = 1207 208 elif(token_type == "keyword" and token_value == "define"):209 210 211 token = lexer()212 token_type = list(token.keys())[0]213 token_value = list(token.values())[0]214 215 if(token_type == "identifier"):216 217 variableName = token_value218 token = lexer()219 token_type = list(token.keys())[0]220 token_value = list(token.values())[0]221 222 if(token_type == "number"):223 224 variableValue = int(token_value.strip())225 symbolTable[variableName] = variableValue226 status = preProcessorDirective()227 228 229 else:230 print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")231 status = 1232 else:233 print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")234 status = 1235 236 else:237 print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")238 status = 1239 else:240 #RESET 
POINTERS SINCE A WRONG TOKEN WAS OBTAINED241 global lb, fp242 lb = lb - len(token_value)243 fp = fp - len(token_value)244 245 return status246 #print("Token key: " + str((token_type) + " values: " + str(token_value) + "\n")) 247def externDeclaration():248 249 250 status = 0251 token = lexer()252 token_type = list(token.keys())[0]253 token_value = list(token.values())[0]254 if(token_type == "keyword" and token_value == "extern"):255 status = declarationStatement()256 if(status == 0):257 258 token = lexer()259 token_type = list(token.keys())[0]260 token_value = list(token.values())[0].strip()261 if(not (token_type == "punctuator" and token_value == ";")):262 print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")263 status = 1264 else:265 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED266 global lb, fp267 lb = lb - len(token_value)268 fp = fp - len(token_value) 269 return status270def declarationStatement():271 272 status = 0273 token = lexer()274 token_type = list(token.keys())[0]275 token_value = list(token.values())[0]276 if(token_type == 'dataType'):277 278 dataType = token_value.strip()279 status = variable(dataType)280 281 else:282 print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")283 status = 1284 285 return status286 287def optionalDeclarationStatement():288 289 #print("IN OPTDECL")290 status = 0291 token = lexer()292 token_type = list(token.keys())[0]293 token_value = list(token.values())[0]294 #print("before reset: " + str(token_value))295 if(token_type == 'dataType'):296 297 298 dataType = token_value.strip()299 status = variable(dataType)300 301 else:302 303 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED304 #print("resetting")305 global lb, fp306 lb = lb - len(token_value)307 fp = fp - len(token_value)308 status = 2309 """310 if(token_value != "do"):311 token = lexer()312 token_type = list(token.keys())[0]313 token_value = list(token.values())[0]314 """315 #print("after reset: " + 
str(token_value))316 return status317 318 319def variable(dataType):320 status = 0321 token = lexer()322 token_type = list(token.keys())[0]323 token_value = list(token.values())[0]324 325 if(token_type == 'identifier'):326 327 #print("received identifier, " + str(token_value))328 variableName = token_value.strip()329 330 if(dataType not in externalVariables):331 externalVariables[dataType] = list()332 333 if(variableName not in externalVariables[dataType]):334 externalVariables[dataType].append(variableName)335 else:336 print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")337 status = 1338 #externalVariables.append([variableName, dataType])339 if(status==0):340 status = variableDash(dataType)341 else:342 print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")343 status = 1344 345 return status346def variableDash(dataType):347 status = 0348 token = lexer()349 token_type = list(token.keys())[0]350 token_value = list(token.values())[0]351 352 if(token_type == 'punctuator' and token_value == ','):353 354 token = lexer()355 token_type = list(token.keys())[0]356 token_value = list(token.values())[0]357 358 if(token_type == 'identifier'):359 360 variableName = token_value.strip()361 if(dataType not in externalVariables):362 externalVariables[dataType] = list() 363 364 if(variableName not in externalVariables[dataType]):365 externalVariables[dataType].append(variableName)366 else:367 print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")368 status = 1369 if(status==0):370 variableDash(dataType)371 372 else:373 print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")374 status = 1375 else:376 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED377 global lb, fp378 #print(token_value)379 #print(str(lb) + " " + str(fp))380 lb = lb - len(token_value)381 fp = fp - len(token_value)382 #print(str(lb) + " " + 
str(fp))383 return status384 385def mainFunction():386 status = 0387 token = lexer()388 token_type = list(token.keys())[0]389 token_value = list(token.values())[0]390 391 if(token_type == "dataType" and token_value == "int"):392 393 status = mainDash()394 395 else:396 print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")397 status = 1398 399 return status400 401 402def mainDash():403 status = 0404 token = lexer()405 token_type = list(token.keys())[0]406 token_value = list(token.values())[0].strip()407 408 #print(str(token_type) + " " + str(token_value))409 410 if(token_type == "identifier" and token_value == "main"):411 412 token = lexer()413 token_type = list(token.keys())[0]414 token_value = list(token.values())[0].strip()415 416 if(token_type == "punctuator" and token_value == "("):417 418 token = lexer()419 token_type = list(token.keys())[0]420 token_value = list(token.values())[0].strip()421 422 if(token_type == "punctuator" and token_value == ")"):423 424 token = lexer()425 token_type = list(token.keys())[0]426 token_value = list(token.values())[0].strip()427 428 if(token_type == "punctuator" and token_value == "{"):429 430 status = statements()431 432 if(status == 0):433 434 token = lexer()435 token_type = list(token.keys())[0]436 token_value = list(token.values())[0].strip()437 #print(token_value + str(len(token_value)))438 if(not(token_type == "punctuator" and token_value == "}")):439 print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")440 status = 1441 else:442 print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")443 status = 1444 445 446 447 elif(token_type == "dataType" and token_value == "int"):448 449 token = lexer()450 token_type = list(token.keys())[0]451 token_value = list(token.values())[0].strip()452 453 if(token_type == "identifier" and token_value == "argc"):454 455 token = lexer()456 token_type = 
list(token.keys())[0].strip()457 token_value = list(token.values())[0].strip()458 459 if(token_type == "punctuator" and token_value == ","):460 461 token = lexer()462 token_type = list(token.keys())[0]463 token_value = list(token.values())[0].strip()464 465 if(token_type == "dataType" and token_value == "char"):466 467 token = lexer()468 token_type = list(token.keys())[0]469 token_value = list(token.values())[0].strip()470 471 if(token_type == "arithmeticOperator" and token_value == "*"):472 473 token = lexer()474 token_type = list(token.keys())[0]475 token_value = list(token.values())[0] .strip()476 477 if(token_type == "identifier" and token_value == "argv"):478 479 token = lexer()480 token_type = list(token.keys())[0]481 token_value = list(token.values())[0].strip()482 483 if(token_type == "punctuator" and token_value == "["):484 485 token = lexer()486 token_type = list(token.keys())[0]487 token_value = list(token.values())[0].strip()488 489 if(token_type == "punctuator" and token_value == "]"):490 491 token = lexer()492 token_type = list(token.keys())[0]493 token_value = list(token.values())[0].strip()494 495 if(token_type == "punctuator" and token_value == ")"):496 497 token = lexer()498 token_type = list(token.keys())[0]499 token_value = list(token.values())[0].strip()500 501 if(token_type == "punctuator" and token_value == "{"):502 503 status = statements()504 505 if(status == 0):506 507 token = lexer()508 token_type = list(token.keys())[0]509 token_value = list(token.values())[0].strip()510 511 if(not(token_type == "punctuator" and token_value == "}")):512 print("Syntax error: expected 'Punctuator2 close curly bracket' ", end = "")513 print("but received " + str(token_value) + "\n")514 status = 1515 else:516 print("Syntax error: expected 'Punctuator open curly bracket' ", end = "")517 print("but received " + str(token_value) + "\n")518 status = 1519 520 else:521 print("Syntax error: expected 'Punctuator close round bracket' but received ", end = "")522 
print(str(token_value) + "\n")523 status = 1524 525 else:526 print("Syntax error: expected 'Punctuator close square bracket' but received ", end = "")527 print(str(token_value) + "\n")528 status = 1529 else:530 print("Syntax error: expected 'Punctuator open square bracket' but received ", end = "")531 print(str(token_value) + "\n")532 status = 1533 534 else:535 print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")536 status = 1537 538 else:539 print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")540 status = 1541 542 else:543 print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")544 status = 1545 546 else:547 print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")548 status = 1 549 550 else:551 print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")552 status = 1553 554 555 else:556 print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")557 status = 1558 559 else:560 print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")561 status = 1562 563 else:564 print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")565 status = 1566 567 return status568data = {}569def statements():570 571 572 #print("top of statements\n")573 status = 0574 status = initializationStatement()575 576 if(status == 0):577 #print("init success")578 token = lexer()579 token_type = list(token.keys())[0]580 token_value = list(token.values())[0]581 #print(token_value +" new value")582 tv = token_value.strip()583 if(token_type == "punctuator" and tv == ";"):584 status = statements()585 else:586 print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")587 status = 1588 589 590 else:591 ''' token = lexer()592 token_type = list(token.keys())[0]593 token_value = 
list(token.values())[0]594 tv = token_value.strip()'''595 #print("dc" + " " + tv)596 597 598 status = optionalDeclarationStatement()599 #print(status)600 if(status == 0): 601 #print("decl success")602 603 token = lexer()604 token_type = list(token.keys())[0]605 token_value = list(token.values())[0]606 tv = token_value.strip()607 if(token_type == "punctuator" and tv == ";"):608 609 status = statements()610 else:611 print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")612 status = 1613 else:614 615 status = assignmentStatement()616 if(status == 0):617 #print("assgn success")618 619 token = lexer()620 token_type = list(token.keys())[0]621 token_value = list(token.values())[0]622 tv = token_value.strip()623 if(token_type == "punctuator" and tv == ";"):624 status = statements()625 else:626 print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")627 status = 1628 else:629 630 status = 0631 token = lexer()632 token_type = list(token.keys())[0]633 token_value = list(token.values())[0]634 #print("IN statements: " + token_value)635 if(token_type == "keyword" and token_value == "do"):636 #print("Do")637 token = lexer()638 token_type = list(token.keys())[0]639 token_value = list(token.values())[0].strip()640 lab1()641 if(token_type == "punctuator" and token_value == "{"):642 #print("{")643 status = statements()644 645 #print("status: " + str(status))646 if(status == 0):647 648 token = lexer()649 token_type = list(token.keys())[0]650 token_value = list(token.values())[0].strip()651 #print(token_value)652 if(token_type == "punctuator" and token_value == "}"):653 #print("}")654 token = lexer()655 token_type = list(token.keys())[0]656 token_value = list(token.values())[0].strip()657 658 if(token_type == "keyword" and token_value == "while"):659 #print("while")660 token = lexer()661 token_type = list(token.keys())[0]662 token_value = list(token.values())[0].strip()663 664 if(token_type == "punctuator" 
and token_value == "("):665 #print("(")666 status = condition()667 lab2()668 if(status == 0):669 670 token = lexer()671 token_type = list(token.keys())[0]672 token_value = list(token.values())[0].strip()673 674 if(token_type == "punctuator" and token_value == ")"):675 #print(")")676 token = lexer()677 token_type = list(token.keys())[0]678 token_value = list(token.values())[0].strip()679 680 if(token_type == "punctuator" and token_value == ";"):681 #print("in statements: " + token_value + "\n")682 status = statements()683 684 else:685 print("Syntax error: expected 'Punctuator semicolon5' ", end = "")686 print("but received " + str(token_value) + "\n")687 status = 1688 689 else:690 print("Syntax error: expected 'Punctuator close round bracket' ", end = "")691 print("but received " + str(token_value) + "\n")692 status = 1693 694 else:695 print("Syntax error: expected 'Punctuator open round bracket' ", end = "") 696 print("but received " + str(token_value) + "\n")697 status = 1698 699 else:700 print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")701 status = 1702 703 else:704 print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")705 status = 1706 707 else:708 print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")709 status = 1710 711 else:712 713 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED714 global lb, fp715 #print(token_value)716 #print(str(lb) + " " + str(fp))717 lb = lb - len(token_value)718 fp = fp - len(token_value)719 720 return status721def initializationStatement():722 status = 0723 724 global lb, fp725 726 token = lexer()727 token_type = list(token.keys())[0]728 token_value = list(token.values())[0]729 if(token_type == "dataType"):730 if(token_value not in data):731 data[token_value] = {};732 #print(token_value)733 734 status = initStat(token_value)735 736 737 else:738 739 #RESET POINTERS SINCE A WRONG TOKEN WAS 
OBTAINED740 #print(token_value)741 #print(str(lb) + " " + str(fp))742 lb = lb - len(token_value)743 fp = fp - len(token_value)744 status = 2745 #print('returning' + str(status)) 746 return status747 748 749def initStat(dt):750 status = multipleInitialization(dt)751 #print(status)752 753 754 755 '''if(status != 0 and status != 2):756 status = 0757 token = lexer()758 token_type = list(token.keys())[0]759 token_value = list(token.values())[0]760 tk = token_value761 if(token_type == "identifier"):762 763 if(token_value not in data[dt]):764 data[dt][token_value]=0765 else:766 print("Syntax Error: The variable has already been initialized\n")767 return 1768 token = lexer()769 token_type = list(token.keys())[0]770 token_value = list(token.values())[0]771 772 if(token_type == "assignmentOperator" and token_value == "="):773 774 status = E(dt,tk)775 """776 print(status)777 status = 0778 token = lexer()779 token_type = list(token.keys())[0]780 token_value = list(token.values())[0]781 print(token_value)782 """783 784 elif(token_type == "punctuator" and token_value == ","):785 786 global lb, fp787 #print(token_value)788 #print(str(lb) + " " + str(fp))789 lb = lb - len(token_value)790 fp = fp - len(token_value)791 status = 2792 793 else:794 795 print("Syntax error: expected 'Assignment1 Operator' but received " + str(token_value) + "\n")796 status = 1 '''797 798 799 return status800 801def multipleInitialization(dt):802 global data803 status = 0804 token = lexer()805 token_type = list(token.keys())[0]806 token_value = list(token.values())[0]807 tk = token_value808 if(token_type == "identifier"):809 push(tk)810 #print(tk)811 if(token_value not in data[dt]):812 if(dt=="int"):813 data[dt][token_value]=int(0)814 elif(dt=="char"):815 data[dt][token_value]=string(0)816 elif(dt=="float"):817 data[dt][token_value]=float(0)818 elif(dt=="double"):819 data[dt][token_value]=float(0)820 else:821 data[dt][token_value]=0822 #print(" "+token_value +":)")823 else:824 print("Syntax Error: The 
variable has already been initialized\n")825 return 1826 827 token = lexer()828 token_type = list(token.keys())[0]829 token_value = list(token.values())[0]830 tv = token_value.strip()831 #print(token_value+" macha")832 if(tv == ";"):833 #print("; la")834 global lb, fp835 #print(token_value)836 #print(str(lb) + " " + str(fp))837 lb = lb - len(token_value)838 fp = fp - len(token_value)839 return 0;840 elif(token_type == "assignmentOperator" and tv == "="):841 842 status = E(dt,tk)843 codegen_assign()844 #print(status)845 846 if(status == 0):847 848 status = multinit(dt)849 if(status == 2):850 status = 0851 #print(status)852 elif(token_type == "punctuator" and tv == ","):853 #print(",")854 status = multipleInitialization(dt)855 '''global lb, fp856 #print(token_value)857 #print(str(lb) + " " + str(fp))858 lb = lb - len(token_value)859 fp = fp - len(token_value)860 status = 2 '''861 862 else:863 864 print("Syntax error: expected 'Assignment2 Operator' but received " + str(tv) + "\n")865 status = 1866 else:867 868 print("Syntax error: expected 'Identifier' but received " + str(tv) + "\n")869 status = 1870 871 return status872 873def multinit(dt):874 status = 0875 876 token = lexer()877 token_type = list(token.keys())[0]878 token_value = list(token.values())[0]879 tv = token_value.strip()880 881 if(token_type == "punctuator" and tv == ","):882 883 #print("got comma")884 status = multipleInitialization(dt)885 886 else:887 888 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED889 global lb, fp890 #print(token_value)891 #print(str(lb) + " " + str(fp))892 lb = lb - len(token_value)893 fp = fp - len(token_value)894 status = 2895 896 return status897def assignmentStatement():898 global data899 dty =''900 status = 0901 token = lexer()902 token_type = list(token.keys())[0]903 token_value = list(token.values())[0]904 tk = token_value905 #print("asgn")906 if(token_type == "identifier"):907 push(tk)908 #print(tk)909 for i in data:910 for j in data[i]:911 if(j==token_value):912 dty = 
i913 if(dty==''):914 print("The variable "+token_value+" has not been initialized.")915 return 1916 token = lexer()917 token_type = list(token.keys())[0]918 token_value = list(token.values())[0]919 920 if(token_type == "assignmentOperator" and token_value == "="):921 922 status = E(dty,tk)923 codegen_assign()924 925 else:926 927 print("Syntax error: expected 'Assignment3 Operator' but received " + str(token_value) + "\n")928 status = 1929 else:930 931 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED932 global lb, fp933 #print(token_value)934 #print(str(lb) + " " + str(fp))935 lb = lb - len(token_value)936 fp = fp - len(token_value)937 status = 2938 939 return status940def condition():941 status = 0942 943 status = C()944 945 return status946def C():947 status = 0948 token = lexer()949 token_type = list(token.keys())[0]950 token_value = list(token.values())[0]951 tv = token_value.strip()952 if(token_type == "identifier" or token_type=="number"):953 push(tv)954 token = lexer()955 token_type = list(token.keys())[0]956 token_value = list(token.values())[0]957 tk = token_value.strip()958 if(token_type == "relationalOperator" or token_type == "logicalOperator"):959 push(tk)960 status = C() 961 elif(token_value == ")"):962 global lb, fp963 #print(token_value)964 #print(str(lb) + " " + str(fp))965 lb = lb - len(token_value)966 fp = fp - len(token_value)967 return 0968 else:969 return 1970 elif(not (token_type == "boolean")):971 972 print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")973 status = 1974 return status975op = ""976def E(dt,vn):977 status = F(dt,vn)978 if(status == 0):979 980 status = E1(dt,vn)981 982 return status983 984def E1(dt,vn):985 status = 0986 token = lexer()987 token_type = list(token.keys())[0]988 token_value = list(token.values())[0]989 tv = token_value.strip()990 global op;991 if(token_type == "arithmeticOperator" and tv == "+"):992 op = "+"993 push(tv)994 #print(tv)995 status = F(dt,vn)996 codegen()997 if(status == 
0):998 999 status = E1(dt,vn)1000 1001 else:1002 1003 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1004 global lb, fp1005 #print(token_value)1006 #print(str(lb) + " " + str(fp))1007 lb = lb - len(token_value)1008 fp = fp - len(token_value)1009 return status1010def F(dt,vn):1011 status = 01012 1013 status = G(dt,vn)1014 1015 if(status == 0):1016 1017 status = F1(dt,vn)1018 return status1019 1020def F1(dt,vn):1021 status = 01022 token = lexer()1023 token_type = list(token.keys())[0]1024 token_value = list(token.values())[0]1025 tv = token_value.strip()1026 global op;1027 if(token_type == "arithmeticOperator" and tv == "-"):1028 op = "-"1029 push(tv)1030 #print(tv)1031 status = G(dt,vn)1032 codegen()1033 1034 if(status == 0):1035 1036 status = F1(dt,vn)1037 1038 else:1039 1040 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1041 global lb, fp1042 #print(token_value)1043 #print(str(lb) + " " + str(fp))1044 lb = lb - len(token_value)1045 fp = fp - len(token_value)1046 return status1047 1048def G(dt,vn):1049 status = 01050 1051 status = H(dt,vn)1052 if(status == 0):1053 1054 status = G1(dt,vn)1055 return status1056def G1(dt,vn):1057 status = 01058 1059 token = lexer()1060 token_type = list(token.keys())[0]1061 token_value = list(token.values())[0]1062 tv = token_value.strip()1063 global op;1064 if(token_type == "arithmeticOperator" and tv == "*"):1065 push(tv)1066 #print(tv)1067 op = "*"1068 status = H(dt,vn)1069 codegen()1070 if(status == 0):1071 1072 status = G1(dt,vn)1073 1074 else:1075 1076 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1077 global lb, fp1078 #print(token_value)1079 #print(str(lb) + " " + str(fp))1080 lb = lb - len(token_value)1081 fp = fp - len(token_value)1082 return status1083 1084def H(dt,vn):1085 status = 01086 1087 status = I(dt,vn)1088 1089 if(status == 0):1090 1091 status = H1(dt,vn)1092 return status1093 1094def H1(dt,vn):1095 status = 01096 1097 token = lexer()1098 token_type = list(token.keys())[0]1099 token_value = 
list(token.values())[0]1100 tv = token_value.strip()1101 1102 if(token_type == "arithmeticOperator" and tv == "/"):1103 global op;1104 op = "d";1105 push(tv)1106 #print(tv)1107 status = I(dt,vn)1108 codegen()1109 if(status == 0):1110 1111 status = H1(dt,vn)1112 1113 else:1114 1115 #RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED1116 global lb, fp1117 #print(token_value + ":::" + str(len(token_value)))1118 #print(str(lb) + " " + str(fp))1119 1120 lb = lb - len(token_value)1121 fp = fp - len(token_value)1122 return status1123 1124def I(dt,vn):1125 global data1126 status = 01127 chk = 01128 token = lexer()1129 token_type = list(token.keys())[0]1130 token_value = list(token.values())[0]1131 tv = token_value.strip()1132 1133 if(token_type == "arithmeticOperator" and tv == "-"):1134 chk = 11135 push(tv)1136 #print(tv)1137 status = I()1138 codegen_umin()1139 elif(not(token_type == "identifier" or token_type == "number")):1140 print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")1141 status = 11142 return status1143 if(token_type == "identifier" or token_type == "number"):1144 push(tv)1145 #print(tv)1146 global op;1147 g = True1148 if(token_value == "identifier"):1149 if(token_value not in data[dt]):1150 print("Syntax error: The variable "+token_value+" not in "+dt)1151 g = False1152 elif(token_value == "number"):1153 if(not isinstance(token_value,dt)):1154 print("Syntax error: The variable belongs to a different type")1155 False1156 if(op=="" and g == True):1157 if(token_type == "identifier"):1158 if(chk==1):1159 data[dt][vn]=-1*data[dt][token_value]1160 chk = 01161 else:1162 #print(token_value)1163 data[dt][vn]=data[dt][token_value]1164 1165 if(token_type == "number"):1166 if(chk==1):1167 data[dt][vn]=-1*float(token_value)1168 chk = 01169 else:1170 data[dt][vn]=float(token_value)1171 elif(op=="d" and g == True):1172 if(token_type == "identifier"):1173 if(chk==1):1174 data[dt][vn]/=-1*data[dt][token_value]1175 chk = 01176 op=""1177 
else:1178 data[dt][vn]/=data[dt][token_value]1179 op=""1180 1181 if(token_type == "number"):1182 if(chk==1):1183 data[dt][vn]/=-1*float(token_value)1184 chk = 01185 op = ""1186 else:1187 data[dt][vn]/=float(token_value)1188 op = ""1189 elif(op=="*" and g == True):1190 if(token_type == "identifier"):1191 if(chk==1):1192 data[dt][vn]*=-1*data[dt][token_value]1193 chk = 01194 op=""1195 else:1196 data[dt][vn]*=data[dt][token_value]1197 op=""1198 1199 if(token_type == "number"):1200 if(chk==1):1201 data[dt][vn]*=-1*float(token_value)1202 chk = 01203 op = ""1204 else:1205 data[dt][vn]*=float(token_value)1206 op = ""1207 elif(op=="-" and g == True):1208 if(token_type == "identifier"):1209 if(chk==1):1210 data[dt][vn]-=-1*data[dt][token_value]1211 chk = 01212 op=""1213 else:1214 data[dt][vn]-=data[dt][token_value]1215 op=""1216 1217 if(token_type == "number"):1218 if(chk==1):1219 data[dt][vn]-=-1*float(token_value)1220 chk = 01221 op = ""1222 else:1223 data[dt][vn]-=float(token_value)1224 op = ""1225 elif(op=="+" and g == True):1226 if(token_type == "identifier"):1227 if(chk==1):1228 data[dt][vn]+=-1*data[dt][token_value]1229 chk = 01230 op=""1231 else:1232 data[dt][vn]+=data[dt][token_value]1233 op=""1234 1235 if(token_type == "number"):1236 if(chk==1):1237 data[dt][vn]+=-1*float(token_value)1238 chk = 01239 op = ""1240 else:1241 data[dt][vn]+=float(token_value)1242 op = ""1243 return status1244 1245 1246 1247prg = open("nocomments.c").read()1248symbolTable = dict()1249externalVariables = dict()1250localVariables = list()1251keyword = ["include", "define", "while", "do", "for", "return", "extern"]1252dataType = ["void", "int", "short", "long", "char", "float", "double"]1253preDefRoutine = ["printf", "scanf"]1254#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]1255identifier = "^[^\d\W]\w*\Z"1256punctuator = "^[()[\]{};.,]$"1257aritmeticOperator = "^[-+*/]$"1258assignmentOperator = "^=$"1259relationalOperator = ["<", ">", "<=", ">=", "==", 
"!="]1260logicalOperator = ["&&", "||", "!"]1261number = "^\d+$"1262spaces = "[' ''\n''\t']"1263loadSymbolTable()1264parse_start()1265'''1266for i in data:1267 for j in data[i]:1268 print(i+" "+j+" "+str(data[i][j]))1269'''1270"""1271while lb!=len(prg):1272 lexer()1273"""1274#print(symbolTable)1275#print(externalVariables)1276"""1277PARSER ERROR CODES:12780-SUCCESS12791-FAILURE1280"""1281 ...

Full Screen

Full Screen

parser.py

Source:parser.py Github

copy

Full Screen

1import lexer, os, parseIndividual2from objects.varObject import VarObject3from objects.printObject import PrintObject4from objects.ifObject import IfObject5from objects.elseObject import ElseObject6from objects.elseIfObject import ElseIfObject7from objects.whileObject import WhileObject8from objects.forLoopObject import ForLoopObject9from objects.functionObject import FunctionObject10from objects.giveObject import GiveObject11from objects.functionSingleObject import FunctionSingleObject 12from objects.readFileObject import ReadFileObject13from objects.writeFileObject import WriteFileObject14class Parser(object):15 def __init__(self, tokens):16 self.tokens = tokens17 self.token_index = 018 self.transpiled_code = ""19 self.indents = 020 self.funcs = []21 def parse(self):22 23 while self.token_index < len(self.tokens):24 ###print(self.token_index)25 ##print(self.tokens[self.token_index][0])26 token_type = self.tokens[self.token_index][0] 27 token_value = self.tokens[self.token_index][1]28 #print(token_value + "Is it")29 if token_type == "IDENTIFIER" and self.tokens[self.token_index + 1][1] in ["+=", "=", "-="]:30 self.parse_variable_declaration(self.tokens[self.token_index : len(self.tokens)])31 elif token_type == "COMMENT":32 self.token_index += 133 elif token_type == "IDENTIFIER" and token_value == "stampLn":34 self.parse_stamp_ln(self.tokens[self.token_index : len(self.tokens)])35 elif token_type == "IDENTIFIER" and token_value == "completeIf":36 self.parse_if_statement(self.tokens[self.token_index : len(self.tokens)])37 elif token_type == "IDENTIFIER" and token_value == "otherwise":38 self.parse_else_statement(self.tokens[self.token_index: len(self.tokens)])39 elif token_type == "IDENTIFIER" and token_value == "completeElseIf":40 self.parse_elif_statement(self.tokens[self.token_index: len(self.tokens)])41 elif token_type == "CASE" and token_value == "}":42 self.indents -= 143 self.token_index += 144 elif token_type == "CASE" and token_value == "{":45 
self.token_index += 146 elif token_type == "IDENTIFIER" and token_value == "completeWhile":47 self.parse_while_loop(self.tokens[self.token_index: len(self.tokens)])48 elif token_type == "IDENTIFIER" and token_value in self.funcs:49 self.parse_function(self.tokens[self.token_index: len(self.tokens)], token_value)50 elif token_type == "IDENTIFIER" and token_value == "quitLoop":51 if self.tokens[self.token_index + 1][1] == ";":52 exec_code = "break"53 for i in range(self.indents):54 exec_code = "\t" + exec_code55 self.transpiled_code = self.transpiled_code + f"{exec_code}\n"56 self.token_index += 257 else:58 raise ValueError("';' expected after quitLoop statement")59 elif token_type == "IDENTIFIER" and token_value == "advance":60 if self.tokens[self.token_index + 1][1] == ";":61 exec_code = "pass"62 for i in range(self.indents):63 exec_code = "\t" + exec_code64 self.transpiled_code = self.transpiled_code + f"{exec_code}\n"65 self.token_index += 266 else:67 raise ValueError("';' expected after advance statement")68 elif token_type == "IDENTIFIER" and token_value == "loop":69 self.parse_for_loop(self.tokens[self.token_index: len(self.tokens)])70 elif token_type == "IDENTIFIER" and token_value == "give":71 self.parse_give(self.tokens[self.token_index: len(self.tokens)])72 elif token_type == "IDENTIFIER" and token_value == "defFunc":73 self.parse_function_declaration(self.tokens[self.token_index: len(self.tokens)])74 elif token_type == "IDENTIFIER" and token_value == "use":75 self.parse_import(self.tokens[self.token_index: len(self.tokens)])76 elif token_type == "IDENTIFIER" and token_value == "readFile":77 self.parseReadFile(self.tokens[self.token_index: -1])78 elif token_type == "IDENTIFIER" and token_value == "writeFile":79 self.parseWriteFile(self.tokens[self.token_index: -1])80 else:81 #print(self.token_index)82 raise SyntaxError("ERR: Undefined Item: " + token_value)83 self.transpiled_code = "import time, random, math\n" + self.transpiled_code84 #print("\n\n" + 
self.transpiled_code)85 with open("code.py", "w") as iju:86 iju.write(self.transpiled_code)87 iju.close()88 return self.transpiled_code89 def parse_extra(self):90 while self.token_index < len(self.tokens):91 ###print(self.token_index)92 ##print(self.tokens[self.token_index][0])93 token_type = self.tokens[self.token_index][0] 94 token_value = self.tokens[self.token_index][1]95 #print(token_value + "Is it")96 if token_type == "IDENTIFIER" and self.tokens[self.token_index + 1][1] in ["+=", "=", "-="]:97 self.parse_variable_declaration(self.tokens[self.token_index : len(self.tokens)])98 elif token_type == "COMMENT":99 self.token_index += 1100 elif token_type == "IDENTIFIER" and token_value == "stampLn":101 self.parse_stamp_ln(self.tokens[self.token_index : len(self.tokens)])102 elif token_type == "IDENTIFIER" and token_value == "completeIf":103 self.parse_if_statement(self.tokens[self.token_index : len(self.tokens)])104 elif token_type == "IDENTIFIER" and token_value == "completeElse":105 self.parse_else_statement(self.tokens[self.token_index: len(self.tokens)])106 elif token_type == "IDENTIFIER" and token_value == "completeElseIf":107 self.parse_elif_statement(self.tokens[self.token_index: len(self.tokens)])108 elif token_type == "CASE" and token_value == "}":109 self.indents -= 1110 self.token_index += 1111 elif token_type == "CASE" and token_value == "{":112 self.token_index += 1113 elif token_type == "IDENTIFIER" and token_value == "completeWhile":114 self.parse_while_loop(self.tokens[self.token_index: len(self.tokens)])115 elif token_type == "IDENTIFIER" and token_value in self.funcs:116 self.parse_function(self.tokens[self.token_index: len(self.tokens)], token_value)117 elif token_type == "IDENTIFIER" and token_value == "quitLoop":118 if self.tokens[self.token_index + 1][1] == ";":119 exec_code = "break"120 for i in range(self.indents):121 exec_code = "\t" + exec_code122 self.transpiled_code = self.transpiled_code + f"{exec_code}\n"123 self.token_index += 2124 
else:125 raise ValueError("';' expected after quitLoop statement")126 elif token_type == "IDENTIFIER" and token_value == "advance":127 if self.tokens[self.token_index + 1][1] == ";":128 exec_code = "pass"129 for i in range(self.indents):130 exec_code = "\t" + exec_code131 self.transpiled_code = self.transpiled_code + f"{exec_code}\n"132 self.token_index += 2133 else:134 raise ValueError("';' expected after advance statement")135 elif token_type == "IDENTIFIER" and token_value == "loop":136 self.parse_for_loop(self.tokens[self.token_index: len(self.tokens)])137 elif token_type == "IDENTIFIER" and token_value == "give":138 self.parse_give(self.tokens[self.token_index: len(self.tokens)])139 elif token_type == "IDENTIFIER" and token_value == "defFunc":140 self.parse_function_declaration(self.tokens[self.token_index: len(self.tokens)])141 elif token_type == "IDENTIFIER" and token_value == "use":142 self.parse_import(self.tokens[self.token_index: len(self.tokens)])143 else:144 #print(self.token_index)145 raise SyntaxError("ERR: Undefined Item: " + token_value)146 return self.transpiled_code147 148 def parse_variable_declaration(self, tkns):149 tokens_checked = 0150 name = ''151 operator = ''152 value = ''153 for token in range(len(tkns)):154 token_type = tkns[tokens_checked][0]155 token_value = tkns[tokens_checked][1]156 if token_type == "STATEMENT_END":157 break158 elif token == 0 and token_type == "IDENTIFIER":159 name = token_value160 elif token == 0 and token_value != "IDENTIFIER":161 raise ValueError("ERR: Invalid Variable Name " + token_value)162 elif token == 1 and token_type == "OPERATOR":163 operator = token_value164 elif token == 1 and token_type != "OPERATOR":165 raise ValueError("ERR: Invalid Variable Operator " + token_value)166 elif token == 2 and token_type in ['IDENTIFIER', 'INTEGER', "BOOL", "STRING", "CASE"]:167 value = token_value168 elif token == 2 and token_type not in ['IDENTIFIER','INTEGER', "BOOL", "STRING", "CASE"]:169 raise ValueError("ERR: 
Inavlid Variable Value " + token_value + " in " + name)170 elif token >= 3 and token_type in ['IDENTIFIER', 'INTEGER', "BOOL", "STRING", "OPERATOR", "CASE"]:171 value = value + " " + token_value172 elif token >= 3 and token_type not in ['IDENTIFIER', 'INTEGER', "BOOL", "STRING", "OPERATOR", "CASe"]:173 raise ValueError("ERR: Inavlid Variable Value " + token_value + " in " + name)174 tokens_checked += 1175 ##print(name, operator, value)176 if name == "lenstadt":177 raise ValueError("Variable Cannot Be Named 'lenstadt'")178 VarObj = VarObject()179 self.transpiled_code = self.transpiled_code + VarObj.transpile(name, operator, value, self.indents)180 self.token_index += tokens_checked + 1181 def parse_stamp_ln(self, tkns):182 tokens_checked = 0183 value = ''184 for token in range(len(tkns)):185 token_type = tkns[tokens_checked][0]186 token_value = tkns[tokens_checked][1]187 if token_type == "STATEMENT_END":188 break189 elif token == 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:190 value = token_value191 elif token == 1 and token_type not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:192 raise ValueError("Invalid StampLn Value " + token_value)193 elif token >= 2 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:194 value = value + " " + token_value195 elif token >= 2 and token_type not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:196 raise ValueError("Invalid StampLn Value " + token_value)197 tokens_checked += 1198 printObj = PrintObject()199 self.transpiled_code = self.transpiled_code + printObj.transpile(value, self.indents)200 self.token_index += tokens_checked + 1201 def parse_if_statement(self, tkns):202 tokens_checked = 0203 case = ''204 for token in range(len(tkns)):205 token_type = tkns[tokens_checked][0]206 token_value = tkns[tokens_checked][1]207 if token_type == "CASE" and token_value == "{":208 break209 elif token == 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:210 case 
= case + token_value211 elif token == 1 and token_value not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:212 raise ValueError("Invalid CompleteIf Statement Particle: " + token_value)213 elif token >= 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:214 case = case + " " + token_value215 elif token >= 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:216 raise ValueError("Invalid CompleteIf Statement Particle: " + token_value)217 tokens_checked += 1218 IfObj = IfObject()219 self.transpiled_code = self.transpiled_code + IfObj.transpile(case, self.indents)220 self.token_index = self.token_index + tokens_checked + 1221 self.indents += 1222 def parse_else_statement(self, tkns):223 tokens_checked = 0224 for token in range(len(tkns)):225 token_type = tkns[tokens_checked][0]226 token_value = tkns[tokens_checked][1]227 if token == 1 and token_type == "CASE" and token_value == "{":228 break229 elif token == 1 and not token_type == "CASE" and token_value == "{":230 raise ValueError("Invalid CompleteElse Particle: " + token_value)231 tokens_checked += 1232 ElseObj = ElseObject()233 self.transpiled_code = self.transpiled_code + ElseObj.transpile(self.indents)234 self.token_index = self.token_index + tokens_checked + 1235 self.indents += 1236 def parse_elif_statement(self, tkns):237 tokens_checked = 0238 case = ''239 for token in range(len(tkns)):240 token_type = tkns[tokens_checked][0]241 token_value = tkns[tokens_checked][1]242 if token_type == "CASE" and token_value == "{":243 break244 elif token == 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:245 case = case + token_value246 elif token == 1 and token_value not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:247 raise ValueError("Invalid CompleteElseIf Statement Particle: " + token_value)248 elif token >= 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:249 case = case + " " + token_value250 elif token >= 1 and 
token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:251 raise ValueError("Invalid CompleteElseIf Statement Particle: " + token_value)252 tokens_checked += 1253 ElseIfObj = ElseIfObject()254 self.transpiled_code = self.transpiled_code + ElseIfObj.transpile(case, self.indents)255 self.token_index += tokens_checked + 1256 self.indents += 1257 def parse_while_loop(self, tkns):258 tokens_checked = 0259 case = ''260 for token in range(len(tkns)):261 token_type = tkns[tokens_checked][0]262 token_value = tkns[tokens_checked][1]263 if token_type == "CASE" and token_value == "{":264 break265 elif token == 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:266 case = case + token_value267 elif token == 1 and token_value not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:268 raise ValueError("Invalid CompleteWhile Particle: " + token_value)269 elif token >= 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:270 case = case + " " + token_value271 elif token >= 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:272 raise ValueError("Invalid CompleteWhile Particle: " + token_value)273 tokens_checked += 1274 WhileObj = WhileObject()275 self.transpiled_code = self.transpiled_code + WhileObj.transpile(case, self.indents)276 self.token_index += tokens_checked + 1277 self.indents += 1278 def parse_for_loop(self, tkns):279 tokens_checked = 0280 temp_var = ""281 to = ''282 fro = ''283 place_holder = 0284 for token in range(len(tkns)):285 token_type = tkns[tokens_checked][0]286 token_value = tkns[tokens_checked][1]287 #print(token == place_holder and token_type in ['IDENTIFIER', 'INTEGER'])288 if token == 0:289 pass290 elif token_type == "CASE" and token_value == "{":291 break292 elif token == 1 and token_type == "IDENTIFIER":293 temp_var = token_value294 elif token == 1 and token_type != "IDENTIFIER":295 raise ValueError("Invalid Temporary Loop Variable: " + token_value)296 elif token == 2 and token_value 
== "from":297 pass298 elif token == 2 and token_value != "from":299 raise ValueError("'from' expected.")300 elif token == 3 and token_type in ['IDENTIFIER', 'INTEGER']:301 fro = fro + token_value302 elif token == 3 and token_type not in ['IDENTIFIER', 'INTEGER']:303 raise ValueError("Invalid to Value in Loop")304 elif token > 3 and token < place_holder and place_holder != 0 and token_type in ['IDENTIFIER', 'INTEGER', 'OPERATOR'] and token_value != "to":305 fro = fro + " " + token_value306 elif token > 3 and token_value == "to":307 place_holder = token + 1308 #print("Hahahaha " + str(place_holder))309 elif token > 3 and token_type not in ['INTEGER', 'IDENTIFIER', 'OPERATOR']:310 raise ValueError('Invalid From Value in Loop')311 elif token == place_holder and token_type in ['IDENTIFIER', 'INTEGER']:312 #print("Bob")313 to = to + token_value314 elif token == place_holder and token_type not in ['IDENTIFIER', 'INTEGER']:315 raise ValueError("Invalid to Value in Loop")316 elif token > place_holder and token_type in ['IDENTIFIER', 'INTEGER', 'OPERATOR']:317 to = to + " " + token_value318 elif token > place_holder and token_type not in ['INTEGER', 'IDENTIFIER', 'OPERATOR']:319 raise ValueError('Invalid To Value in Loop')320 tokens_checked += 1321 #print("Hey you ", fro, to)322 ForLoopObj = ForLoopObject()323 self.transpiled_code = self.transpiled_code + ForLoopObj.transpile(temp_var, fro, to, self.indents)324 self.token_index += tokens_checked + 1325 self.indents += 1326 def parse_function_declaration(self, tkns):327 tokens_checked = 0328 name = ''329 params = []330 place = 0331 for token in range(len(tkns)):332 token_type = tkns[tokens_checked][0]333 token_value = tkns[tokens_checked][1]334 if token_type == "CASE" and token_value == "{":335 break336 elif token == 1 and token_type == "IDENTIFIER":337 name = token_value338 elif token == 1 and token_type != "IDENTIFIER":339 raise ValueError("Invalid Function Name")340 elif token == 2 and token_type == "CASE" and token_value 
== "(":341 pass342 elif token == 2 and token_value != "CASE":343 raise ValueError("'(' expected.")344 elif token == 2 and token_value != "(":345 raise ValueError("'(' expected.")346 elif token > 3 and token_value == ")":347 pass348 elif token == 3 and token_type == "IDENTIFIER":349 params.append(token_value)350 place = token + 1351 elif token == 3 and token_type == "CASE" and token_value == ")":352 pass353 elif token == 3 and token_type != "IDENTIFIER":354 raise ValueError("Invalid Parameter Name")355 elif token == place and place != 0 and token_type == "SEPERATOR":356 pass357 elif token == place and place != 0 and token_type != "SEPERATOR":358 raise ValueError("',' expected")359 elif token == place + 1 and token_type == "IDENTIFIER":360 params.append(token_value)361 place = token + 1362 elif token == place + 1 and token_type != "IDENTIFIER":363 raise ValueError("Invalid Parameter Name") 364 tokens_checked += 1365 FunctionObj = FunctionObject()366 self.transpiled_code = self.transpiled_code + FunctionObj.transpile(name, params, self.indents)367 self.token_index += tokens_checked + 1368 #print(name, tokens_checked, self.tokens[self.token_index])369 self.indents += 1370 self.funcs.append(name)371 def parse_give(self, tkns):372 tokens_checked = 0373 value = ''374 for token in range(len(tkns)):375 token_type = tkns[tokens_checked][0]376 token_value = tkns[tokens_checked][1]377 if token_type == "STATEMENT_END":378 break379 elif token == 1 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:380 value = token_value381 elif token == 1 and token_type not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL"]:382 raise ValueError("Invalid Give Value " + token_value)383 elif token >= 2 and token_type in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:384 value = value + " " + token_value385 elif token >= 2 and token_type not in ["IDENTIFIER", "STRING", "INTEGER", "BOOL", "OPERATOR"]:386 raise ValueError("Invalid Give Value " + token_value)387 tokens_checked += 1388 
# NOTE(review): this excerpt opens mid-method — the three statements below are
# the tail of a transpile handler whose `def` line lies outside the visible
# chunk. Preserved verbatim; do not read them as module-level code.
GiveObj = GiveObject()
self.transpiled_code = self.transpiled_code + GiveObj.transpile(value, self.indents)
self.token_index = self.token_index + tokens_checked + 1

def parse_function(self, tkns, nm):
    """Parse a function call `name(par1, par2, ...);` and transpile it.

    Parameters are collected between '(' and ')'; a SEPERATOR token starts a
    new parameter, and consecutive value tokens are joined with spaces.
    Raises ValueError on malformed syntax (missing '(' , ')' or ';').
    `FunctionSingleObject` is defined elsewhere in this project — it emits the
    transpiled call text.
    """
    tokens_checked = 0
    name = nm
    pars = []               # completed parameter strings
    current_par = ''        # parameter currently being accumulated
    place = 2               # index where the next parameter is expected to start
    for token in range(len(tkns)):
        token_type = tkns[tokens_checked][0]
        token_value = tkns[tokens_checked][1]
        if token == 1 and token_type == "CASE" and token_value == "(":
            pass
        elif token == 1 and token_type != "CASE":
            raise ValueError("'(' expected")
        elif token == 1 and token_value != "(":
            raise ValueError("'(' expected")
        elif token >= 2 and token_value == ")":
            # Closing paren must be followed by ';'.
            if tkns[tokens_checked + 1][0] == "STATEMENT_END":
                pars.append(current_par)
                tokens_checked += 1
                break
            else:
                raise ValueError("';' expected")
        elif token == place and token_type in ['IDENTIFIER', 'STRING', 'BOOL', "INTEGER"]:
            current_par = token_value
        elif token == place and token_type not in ['IDENTIFIER', 'STRING', 'BOOL', "INTEGER"]:
            raise ValueError("Invalid Function Parameter Contents: " + token_value)
        elif token > place and token_type == "SEPERATOR":
            pars.append(current_par)
            current_par = ""
            place = token + 1
        elif token > place and token_type in ['IDENTIFIER', 'STRING', 'BOOL', "INTEGER", "OPERATOR"]:
            current_par = current_par + " " + token_value
        elif token > place and token_type not in ['IDENTIFIER', 'STRING', 'BOOL', "INTEGER", "OPERATOR"]:
            raise ValueError("Invalid Function Parameter Contents: " + token_value)
        tokens_checked += 1
    FunctionSingleObj = FunctionSingleObject()
    self.transpiled_code = self.transpiled_code + FunctionSingleObj.transpile(name, pars, self.indents)
    self.token_index = self.token_index + tokens_checked + 1

def parse_import(self, tkns):
    """Parse `import <module>;`, load `modules/<module>.stadt` and prepend
    its transpiled code.

    Raises ValueError on bad syntax and ImportError if the module file does
    not exist.  Assumes `os` and `parseIndividual` are imported at module
    top (outside this excerpt) — confirm.
    """
    tokens_checked = 0
    module = "modules/"
    mod2 = ""
    for token in range(len(tkns)):
        token_type = tkns[tokens_checked][0]
        token_value = tkns[tokens_checked][1]
        if token == 1 and token_type == "IDENTIFIER":
            module = module + token_value + ".stadt"
            mod2 = token_value
        elif token == 1 and token_type != "IDENTIFIER":
            raise ValueError("Invalid Module Name")
        elif token == 2 and token_type == "STATEMENT_END":
            break
        elif token == 2 and token_type != "STATEMENT_END":
            raise ValueError("Semicolon (;), expected")
        tokens_checked += 1
    if not os.path.exists(module):
        raise ImportError("Cannot Use " + mod2)
    else:
        IndividualPrsr = parseIndividual.IndividualParser(module)
        modulework = IndividualPrsr.add_code()
        # Imported code goes *before* everything transpiled so far.
        self.transpiled_code = modulework[0] + self.transpiled_code
        self.funcs += modulework[1]
    self.token_index += tokens_checked + 1

def parseReadFile(self, tkns):
    """Parse `readFile <path> as <var>;` and transpile a file-read.

    The path may span several tokens (joined with spaces) until the `as`
    keyword; the single identifier after `as` names the target variable.
    """
    tokens_checked = 0
    filepath = ""
    asread = False          # True once the `as` keyword has been seen
    asread_token = 0        # position of the `as` keyword
    varname = ""
    for token in range(len(tkns)):
        token_type = tkns[tokens_checked][0]
        token_value = tkns[tokens_checked][1]
        if token == 1 and token_type in ["STRING", "IDENTIFIER"] and not asread:
            filepath = token_value
        elif token_type == "STATEMENT_END":
            break
        elif token == 1 and token_type not in ["STRING", "IDENTIFIER"] and not asread:
            raise ValueError("Invalid File Path to ReadFile")
        elif token >= 2 and token_value == "as":
            asread = True
            asread_token = token
        elif token >= 2 and token_type in ['IDENTIFIER', "STRING", "OPERATOR"] and not asread:
            filepath += " " + token_value
        elif token >= 2 and token_type not in ['IDENTIFIER', "STRING", "OPERATOR"] and not asread:
            raise ValueError("Invalid FilePath in ReadFile")
        elif token == asread_token + 1 and token_type in ['IDENTIFIER'] and asread:
            varname = token_value
        elif token == asread_token + 1 and token_type not in ['IDENTIFIER'] and asread:
            raise ValueError("Invalid Variable name in readFile")

        tokens_checked += 1
    ReadFileObj = ReadFileObject()
    self.transpiled_code += ReadFileObj.transpile(self.indents, varname, filepath)
    self.token_index += tokens_checked + 1

def parseWriteFile(self, tkns):
    """Parse `writeFile <path> as <text>;` and transpile a file-write.

    Mirrors parseReadFile: multi-token path before `as`, multi-token text
    after it.
    """
    tokens_checked = 0
    filepath = ""
    asread = False          # True once the `as` keyword has been seen
    asread_token = 0        # position of the `as` keyword
    text = ""
    for token in range(len(tkns)):
        token_type = tkns[tokens_checked][0]
        token_value = tkns[tokens_checked][1]
        if token == 1 and token_type in ["STRING", "IDENTIFIER"] and not asread:
            filepath = token_value
        elif token_type == "STATEMENT_END":
            break
        elif token == 1 and token_type not in ["STRING", "IDENTIFIER"] and not asread:
            raise ValueError("Invalid File Path to WriteFile")
        elif token >= 2 and token_value == "as":
            asread = True
            asread_token = token
        elif token >= 2 and token_type in ['IDENTIFIER', "STRING", "OPERATOR"] and not asread:
            filepath += " " + token_value
        elif token >= 2 and token_type not in ['IDENTIFIER', "STRING", "OPERATOR"] and not asread:
            # BUG FIX: previously said "ReadFile" (copy-paste from
            # parseReadFile), misreporting the failing statement.
            raise ValueError("Invalid FilePath in WriteFile")
        elif token == asread_token + 1 and token_type in ['IDENTIFIER', 'STRING'] and asread:
            text = token_value
        elif token == asread_token + 1 and token_type not in ['IDENTIFIER', 'STRING'] and asread:
            raise ValueError("Invalid Text value in writeFile")
        elif token >= asread_token + 1 and token_type in ['IDENTIFIER', 'STRING'] and asread:
            text += " " + token_value
        elif token >= asread_token + 1 and token_type not in ['IDENTIFIER', 'STRING'] and asread:
            raise ValueError("Invalid Text value in writeFile")

        tokens_checked += 1
    WriteFileObj = WriteFileObject()
    self.transpiled_code += WriteFileObj.transpile(self.indents, filepath, text)
    self.token_index += tokens_checked + 1

Full Screen

Full Screen

tokenize_and_compute.py

Source:tokenize_and_compute.py Github

copy

Full Screen

import sys
import tokenize

# Recursive-descent calculator over Python's tokenize token stream.
# Grammar (top-down):
#   expression = term  (("+" | "-") term)*
#   term       = factor (("*" | "/") factor)*
#   factor     = NUMBER | "(" expression ")"


class Token:
    """Lightweight holder for one (type, string) pair from tokenize."""

    def __init__(self, token_num, token_value):
        self.token_num = token_num
        self.token_value = token_value


global current_token


def current():
    """Return the current one-token lookahead."""
    global current_token
    return current_token


def next(tk):
    """Advance the stream: store the next token as the lookahead.

    NOTE: deliberately shadows the builtin `next` — kept for interface
    compatibility with existing callers of this module.
    """
    token_num, token_value, _, _, _ = tk.__next__()
    global current_token
    current_token = Token(token_num, token_value)


def expr(tk):
    """expression = term (("+"|"-") term)*  — left-associative."""
    s1 = term(tk)
    token_num, token_value = current().token_num, current().token_value
    value = s1
    while token_value == "+" or token_value == "-":
        print("expr token is %s" % token_value)
        next(tk)
        s2 = term(tk)
        if token_value == "+":
            value += s2
        else:
            value -= s2
        token_num, token_value = current().token_num, current().token_value

    print("expr value is %s" % value)
    return value


def term(tk):
    """term = factor (("*"|"/") factor)*  — left-associative."""
    f1 = factor(tk)
    token_num, token_value = current().token_num, current().token_value
    value = f1
    while token_value == "*" or token_value == "/":
        print("term token is %s" % token_value)
        next(tk)
        # BUG FIX: was `f2 = term(tk)`, which consumed the whole rest of the
        # term and made "/" right-associative: 8/4/2 evaluated as 8/(4/2) == 4.
        # A term's operands are factors; the while-loop supplies left
        # associativity, so 8/4/2 now correctly evaluates to (8/4)/2 == 1.
        f2 = factor(tk)
        if token_value == '*':
            value *= f2
        else:
            value /= f2
        token_num, token_value = current().token_num, current().token_value

    print("term value is %s" % value)
    return value


def factor(tk):
    """factor = NUMBER | "(" expression ")"; returns the numeric value."""
    value = 0
    if current_token.token_num == tokenize.NUMBER:
        value = current_token.token_value
        next(tk)
    elif current_token.token_value == "(":
        next(tk)
        f = expr(tk)
        if current_token.token_value != ")":
            print("parse error! value=%s" % current().token_value)
        value = f
        next(tk)
    return int(value)


if __name__ == '__main__':
    with open(sys.argv[1], "r") as f:
        tk = tokenize.generate_tokens(f.readline)
        next(tk)
        # NOTE(review): the original listing is truncated here; presumably it
        # finishes by printing the result — confirm against the source file.
        print(expr(tk))

Full Screen

Full Screen

token.py

Source:token.py Github

copy

Full Screen

1""" Module for token functionality. """2import api3from api.common import InternalException4def get_token_path(token_name):5 """6 Formats the token name into a token path.7 Returns:8 The token path9 """10 return "tokens.{}".format(token_name)11def set_token(key, token_name, token_value=None):12 """13 Sets a token for the user.14 Args:15 key: the unique identifier object16 token_name: the name of the token to set17 token_value: optionally specify the value of the token18 Returns:19 The token value20 """21 db = api.common.get_conn()22 # Should never realistically collide.23 if token_value is None:24 token_value = api.common.hash(str(key) + api.common.token())25 db.tokens.update(26 key, {'$set': {27 get_token_path(token_name): token_value28 }}, upsert=True)29 return token_value30def delete_token(key, token_name):31 """32 Removes the password reset token for the user in mongo33 Args:34 key: the unique identifier object35 token_name: the name of the token36 """37 db = api.common.get_conn()38 db.tokens.update(key, {'$unset': {get_token_path(token_name): ''}})39def find_key(query, multi=False):40 """41 Find a key based on a particular query.42 Args:43 query: the mongo query44 multi: defaults to False, return at most one result45 """46 db = api.common.get_conn()47 find_func = db.tokens.find_one48 if multi:49 find_func = db.tokens.find50 return find_func(query)51def find_key_by_token(token_name, token_value):52 """53 Searches the database for a user with a token_name token_value pair.54 Args:55 token_name: the name of the token56 token_value: the value of the token57 """58 db = api.common.get_conn()59 key = db.tokens.find_one({60 get_token_path(token_name): token_value61 }, {62 "_id": 0,63 "tokens": 064 })65 if key is None:66 raise InternalException("Could not find {}.".format(token_name))...

Full Screen

Full Screen

token_model.py

Source:token_model.py Github

copy

Full Screen

from datetime import datetime, timedelta
from flask_sqlalchemy import SQLAlchemy
from settings import app

db = SQLAlchemy(app)


class Token(db.Model):
    """ORM model for issued session tokens (table ``gs_token``)."""

    __tablename__ = "gs_token"

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, nullable=False)
    token_value = db.Column(db.String(512), nullable=False)
    # BUG FIX: was `default=datetime.now()`, which is evaluated ONCE at class
    # definition time, stamping every row with the process start time.
    # Passing the callable makes SQLAlchemy evaluate it per INSERT.
    issued_at = db.Column(db.DateTime, nullable=False, default=datetime.now)

    # The original methods were defined without `self`; they only work when
    # called on the class (Python 3 plain functions). Marking them
    # @staticmethod makes that contract explicit and backward compatible.
    @staticmethod
    def get_token_by_user(user_id):
        """Return the first token row for *user_id*, or None."""
        return Token.query.filter_by(user_id=user_id).first()

    @staticmethod
    def create_new_token(user_id, token_value):
        """Persist a new token row; return token_value, or False on failure."""
        try:
            token = Token(
                user_id=user_id,
                token_value=token_value
            )
            db.session.add(token)
            db.session.commit()
            return token_value
        except Exception as e:
            # Best-effort: callers check for a falsy result instead of catching.
            print(e)
            return False

    @staticmethod
    def validate_token(token_value):
        """Return the token row matching *token_value*, or None."""
        return Token.query.filter_by(token_value=token_value).first()

    @staticmethod
    def refresh_token(token, token_value):
        """Replace *token*'s value and reset its issue time; True on success."""
        try:
            token.token_value = token_value
            token.issued_at = datetime.now()
            db.session.commit()
            return True
        except Exception as e:
            print(e)
            return False


try:
    db.create_all()
except Exception as e:
    print(e)

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

1import { TOKEN_VALUE } from 'ng-mocks';2import { mockProvider } from 'ng-mocks';3import { MockBuilder } from 'ng-mocks';4import { MockRender } from 'ng-mocks';5import { MockModule } from 'ng-mocks';6import { MockComponent } from 'ng-mocks';7import { MockDirective } from 'ng-mocks';8import { MockPipe } from 'ng-mocks';9import { MockedComponent } from 'ng-mocks';10import { MockedDirective } from 'ng-mocks';11import { MockedPipe } from 'ng-mocks';12import { MockInstance } from 'ng-mocks';13import { MockService } from 'ng-mocks';14import { MockRenderOptions } from 'ng-mocks';15import { MockInstanceOptions } from 'ng-mocks';16import { MockServiceOptions } from 'ng-mocks';17import { MockComponentOptions } from 'ng-mocks';18import { MockDirectiveOptions } from 'ng-mocks';19import { MockPipeOptions } from 'ng-mocks';20import { MockProviderOptions } from 'ng-mocks';21import { MockRenderResult } from 'ng-mocks';22import { MockRenderStatic } from 'ng-mocks';23import { MockRenderStaticOptions } from 'ng-mocks';24import { MockRenderStaticResult } from 'ng-mocks';25import { MockRenderStaticComponent } from 'ng-mocks';26import { MockRenderStaticDirective } from 'ng-m

Full Screen

Using AI Code Generation

copy

Full Screen

1import { TOKEN_VALUE } from 'ng-mocks/dist/lib/mock-helper/mock-helper';2describe('ng-mocks', () => {3 it('should provide token value', () => {4 expect(TOKEN_VALUE).toBe('token value');5 });6});7declare const TOKEN_VALUE: string;8{9 "compilerOptions": {10 "paths": {11 }12 }13}

Full Screen

Using AI Code Generation

copy

Full Screen

1import {TOKEN_VALUE} from 'ng-mocks';2import {MyService} from './my.service';3import { MockBuilder, MockRender } from 'ng-mocks';4import { MyComponent } from './my.component';5import { MyModule } from './my.module';6describe('MyComponent', () => {7 beforeEach(() => MockBuilder(MyComponent).keep(MyService));8 it('should render', () => {9 const fixture = MockRender(MyComponent);10 expect(fixture.point.componentInstance).toBeDefined();11 });12 it('should render', () => {13 const fixture = MockRender(MyComponent);14 expect(TOKEN_VALUE(MyService)).toBeDefined();15 });16});17import { NgModule } from '@angular/core';18import { MyComponent } from './my.component';19import { MyService } from './my.service';20@NgModule({21})22export class MyModule {}23import { Component } from '@angular/core';24import { MyService } from './my.service';25@Component({26})27export class MyComponent {28 constructor(private readonly myService: MyService) {}29}30import { Injectable } from '@angular/core';31@Injectable()32export class MyService {}33{34 "compilerOptions": {35 }36}37{38 "compilerOptions": {39 "paths": {40 },41 },42}

Full Screen

Using AI Code Generation

copy

Full Screen

1import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const';2import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index';3import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.js';4import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.ts';5import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.d.ts';6import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const';7import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index';8import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.js';9import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.ts';10import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.d.ts';11import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const';12import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index';13import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.js';14import {TOKEN_VALUE} from 'ng-mocks/dist/lib/common/const/index.ts';15import

Full Screen

Using AI Code Generation

copy

Full Screen

1import {TOKEN_VALUE} from 'ng-mocks';2import {SomeService} from './some.service';3import {SomeComponent} from './some.component';4describe('SomeComponent', () => {5 it('should use the real SomeService', () => {6 const service = new SomeService();7 const component = new SomeComponent(service);8 expect(component.someMethod()).toBe('real value');9 });10 it('should use a mock SomeService', () => {11 const service = new SomeService();12 spyOn(service, 'someMethod').and.returnValue('mocked value');13 const component = new SomeComponent(service);14 expect(component.someMethod()).toBe('mocked value');15 });16 it('should use a mock SomeService with TOKEN_VALUE', () => {17 const service = new SomeService();18 spyOn(service, 'someMethod').and.returnValue('mocked value');19 const component = new SomeComponent(TOKEN_VALUE);20 expect(component.someMethod()).toBe('mocked value');21 });22});23import {Injectable} from '@angular/core';24@Injectable()25export class SomeService {26 someMethod() {27 return 'real value';28 }29}30import {Component, Inject} from '@angular/core';31import {SomeService} from './some.service';32@Component({selector: 'some', template: ''})33export class SomeComponent {34 constructor(@Inject(SomeService) private service: SomeService) {}35 someMethod() {36 return this.service.someMethod();37 }38}39import {TOKEN_VALUE} from 'ng-mocks';40import {SomeService} from './some.service';41import {SomeComponent} from './some.component';42describe('SomeComponent', () => {43 it('should use the real SomeService', () => {44 const service = new SomeService();45 const component = new SomeComponent(service);46 expect(component.someMethod()).toBe('real value');47 });48 it('should use a mock SomeService', () =>

Full Screen

Using AI Code Generation

copy

Full Screen

1import {mockComponent} from '@ng-mocks';2import {AppComponent} from './app.component';3import {MyComponent} from './my.component';4describe('AppComponent', () => {5 let component: AppComponent;6 beforeEach(() => {7 component = new AppComponent();8 component.myComponent = mockComponent(MyComponent);9 });10 it('should create the app', () => {11 expect(component).toBeTruthy();12 });13 it(`should have as title 'app'`, () => {14 expect(component.title).toEqual('app');15 });16 it('should render title in a h1 tag', () => {17 const fixture = TestBed.createComponent(AppComponent);18 fixture.detectChanges();19 const compiled = fixture.debugElement.nativeElement;20 expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');21 });22});23import { Component, OnInit, ViewChild } from '@angular/core';24import { MyComponent } from './my.component';25@Component({26})27export class AppComponent implements OnInit {28 title = 'app';29 @ViewChild(MyComponent) myComponent: MyComponent;30 ngOnInit() {31 this.myComponent.myMethod();32 }33}34import { Component, OnInit } from '@angular/core';35@Component({36})37export class MyComponent implements OnInit {38 constructor() { }39 ngOnInit() {40 }41 myMethod() {42 console.log('myMethod called');43 }44}45import {async, ComponentFixture, TestBed} from '@angular/core/testing';46import {MyComponent} from './my.component';47describe('MyComponent', () => {48 let component: MyComponent;49 let fixture: ComponentFixture<MyComponent>;50 beforeEach(async(() => {51 TestBed.configureTestingModule({52 })53 .compileComponents();54 }));55 beforeEach(() => {56 fixture = TestBed.createComponent(MyComponent);57 component = fixture.componentInstance;58 fixture.detectChanges();59 });60 it('should create', () => {61 expect(component).toBeTruthy();62 });63 it('should call myMethod', () => {64 spyOn(component, 'myMethod');65 component.myMethod();66 expect(component.myMethod).toHaveBeenCalled();67 });68});

Full Screen

Using AI Code Generation

copy

Full Screen

1const TOKEN_VALUE = require('ng-mocks').TOKEN_VALUE;2const mock = require('ng-mocks').mock;3describe('Test', () => {4 it('should get value', () => {5 const value = TOKEN_VALUE(mock({token: 'token'}), 'token');6 expect(value).toBe('token');7 });8});

Full Screen

Using AI Code Generation

copy

Full Screen

1describe('AppComponent', () => {2 beforeEach(async(() => {3 TestBed.configureTestingModule({4 }).compileComponents();5 }));6 it('should create the app', () => {7 const fixture = TestBed.createComponent(AppComponent);8 const app = fixture.debugElement.componentInstance;9 expect(app).toBeTruthy();10 });11 it('should have a button', () => {12 const fixture = TestBed.createComponent(AppComponent);13 const button = fixture.debugElement.query(By.directive(TOKEN_VALUE));14 expect(button).toBeTruthy();15 });16});17it('should have a button', () => {18 const fixture = TestBed.createComponent(AppComponent);19 const button = fixture.debugElement.query(By.css('button'));20 expect(button).toBeTruthy();21 });22it('should have a button', () => {23 const fixture = TestBed.createComponent(AppComponent);24 const button = fixture.debugElement.query(By.directive(TOKEN_VALUE));25 const button2 = fixture.debugElement.query(By.css('button'));26 expect(button).toBeTruthy();27 expect(button2).toBeTruthy();28 });29it('should have a button', () => {30 const fixture = TestBed.createComponent(AppComponent);31 const button = fixture.debugElement.query(By.directive(TOKEN_VALUE));32 expect(button).toBeTruthy();33});34expect(button).toBeTruthy();

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run ng-mocks automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful