Best Python code snippet using gherkin-python
mparser.py
Source:mparser.py  
...
                value = str(token_value) + "j"
                c = True
            elif token == 3 and token_type == "SQUARE_ROOT":
                if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):
                    token_value = self.get_token_value(token_value)
                if token_value[len(token_value) - 1] in ["i", "j"]:
                    value = str(np.sqrt(complex(token_value)))
                else:
                    value = str(np.sqrt(float(token_value)))
            elif token == 3 and token_type not in ["COMPLEX_NUMBER", "STRING", "FACTORIAL"]:
                value = str(token_value)
            elif token > 3 and token_type not in ["COMPLEX_NUMBER", "FACTORIAL", "OPERATOR", "SQUARE_ROOT", "IDENTIFIER", "ELLIPSIS_OPERATOR"]:
                value += str(token_value)
            elif token > 3 and token_type == "OPERATOR":
                value += str(token_value.replace('^', '**'))
            elif token > 3 and token_type == "ELLIPSIS_OPERATOR":
                value += str(token_value)
                dots = True
            elif token == 3 and token_type == "FACTORIAL":
                math = MathModule()
                value = str(math.factorial(int(token_value)))
            elif token > 3 and token_type == "COMPLEX_NUMBER":
                value += str(token_value) + "j"
                c = True
            elif token > 3 and token_type == "FACTORIAL":
                math = MathModule()
                value += str(math.factorial(int(token_value)))
            elif token > 3 and token_type == "IDENTIFIER" and token_value in constants:
                value += "constants['{}']".format(token_value)
            elif token > 3 and token_type == "IDENTIFIER":
                value += str(token_value)
            elif token > 3 and token_type == "SQUARE_ROOT":
                if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):
                    token_value = self.get_token_value(token_value)
                if token_value[len(token_value) - 1] in ["i", "j"]:
                    value += str(np.sqrt(complex(token_value)))
                else:
                    value += str(np.sqrt(float(token_value)))
            elif token >= 3 and token_type in ["DATATYPE", "KEYWORD"]:
                msg = "SyntaxError at line " + str(self.lines) + ":\nInvalid variable value '" + token_value + "'"
                self.error_message(msg, token_stream, token)
            tokens_checked += 1
        if dots:
            value = str(self.get_tokens_range(value))

        # ----------------------------------------------------------
        # TYPE CHECKING & EVALUATION:
        def type_check(value):
            string = True
            if "[" in value and "]" in value:
                return  # list literals skip type checking
            if re.match("[0-9]", value) or value in ["True", "False", "None"] or "constants" in value:
                string = False
            if typ8 == "str" and string:
                value = str(value)
            elif typ8 == "str" and string == False:
                msg = "TypeError at line %s:\nDeclared wrong data type, %s is not string" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "char" and string and len(value) == 1:
                value = str(value)
            elif typ8 == "char" and string == False or typ8 == "char" and len(value) > 3:
                msg = "TypeError at line %s:\nDeclared wrong data type, %s is not char" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "int" and string == False and value not in ["True", "False", "None"]:
                try:
                    value = eval(value)
                    value = int(value)
                except NameError:
                    pass
            elif typ8 == "int" and string == True or typ8 == "int" and value in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not integer" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "float" and string == False and value not in ["True", "False", "None"]:
                try:
                    value = eval(value)
                    value = float(value)
                except NameError:
                    pass
            elif typ8 == "float" and string == True or typ8 == "float" and value in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not float" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "complex" and string == False and value not in ["True", "False", "None"]:
                try:
                    value = eval(value)
                    value = 'Complex({}, {})'.format(value.real, value.imag)
                except NameError:
                    pass
            elif typ8 == "complex" and string == True or typ8 == "complex" and value in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not complex number" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "bool" and value in ["True", "False", "None"]:
                try:
                    value = bool(value)
                except NameError:
                    pass
            elif typ8 == "bool" and value not in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not boolean" % (self.lines, value)
                self.error_message(msg, token_stream, token)

        if var_decl == False:
            type_check(value)
        # ---------------------------------------------------------
        if var_decl == False:
            ast['variable_declaration'].append({'value': value})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.symbol_table.append([ast['variable_declaration'][0]['type'], ast['variable_declaration'][1]['name'], ast['variable_declaration'][2]['value']])  # type, name, value
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def parse_variable(self, token_stream, inScope):
        tokens_checked = 0
        ast = {'variable_declaration': []}
        value = ""
        typ8 = ""
        c = False
        var_decl = False
        square_root = False
        dots = False
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            # If a semicolon is found, the loop breaks
            if token_type in ["SEMIC", "NEWLINE"]:
                break
            elif token == 0 and token_type == "IDENTIFIER":
                typ8 = self.get_token_type(token_value)
                ast['variable_declaration'].append({'type': typ8})
                ast['variable_declaration'].append({'name': token_value})
            elif token == 0 and token_type != "IDENTIFIER":
                msg = ("SyntaxError at line " + str(self.lines) + "\nInvalid variable name '" + token_value + "'")
                self.error_message(msg, token_stream, token)
            elif token == 1 and token_type not in ["OPERATOR", "INCREMENT_OPERATOR"]:
                msg = "SyntaxError at line {}:\nInvalid operator '{}'".format(self.lines, token_value)
                self.error_message(msg, token_stream, token)
            elif token == 2 and token_type == "IDENTIFIER" and token_value not in constants and token_stream[tokens_checked + 1][1] != ":":
                value = str(token_value)
            elif token == 2 and token_type == "IDENTIFIER" and token_value in constants:
                value = "constants['{}']".format(token_value)
            elif token == 2 and token_type == "STRING":
                value = token_value.replace('\s', ' ')
            elif token == 2 and token_type == "COMPLEX_NUMBER":
                value = str(token_value) + "j"
                c = True
            elif token == 2 and token_type == "SQUARE_ROOT":
                if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):
                    token_value = self.get_token_value(token_value)
                if token_value[len(token_value) - 1] in ["i", "j"]:
                    value = str(np.sqrt(complex(token_value)))
                else:
                    value = str(np.sqrt(float(token_value)))
            elif token == 2 and token_type not in ["COMPLEX_NUMBER", "STRING", "FACTORIAL"]:
                value = str(token_value)
            elif token > 2 and token_type not in ["COMPLEX_NUMBER", "FACTORIAL", "OPERATOR", "SQUARE_ROOT", "ELLIPSIS_OPERATOR"]:
                value += str(token_value)
            elif token > 2 and token_type == "OPERATOR":
                value += str(token_value.replace('^', '**'))
            elif token > 2 and token_type == "ELLIPSIS_OPERATOR":
                value += str(token_value)
                dots = True
            elif token == 2 and token_type == "FACTORIAL":
                math = MathModule()
                value = str(math.factorial(int(token_value)))
            elif token > 2 and token_type == "COMPLEX_NUMBER":
                value += str(token_value) + "j"
                c = True
            elif token > 2 and token_type == "FACTORIAL":
                math = MathModule()
                value += str(math.factorial(int(token_value)))
            elif token > 2 and token_type == "IDENTIFIER" and token_value in constants:
                value += "constants['{}']".format(token_value)
            elif token > 2 and token_type == "SQUARE_ROOT":
                if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):
                    token_value = self.get_token_value(token_value)
                if token_value[len(token_value) - 1] in ["i", "j"]:
                    value += str(np.sqrt(complex(token_value)))
                else:
                    value += str(np.sqrt(float(token_value)))
            tokens_checked += 1
        if dots:
            value = str(self.get_tokens_range(value))

        # ----------------------------------------------------------
        # TYPE CHECKING & EVALUATION:
        # NOTE: defined here but never invoked in parse_variable
        def type_check(value):
            string = True  # initialized inside the function; referencing an outer flag here raised UnboundLocalError
            if re.match("[0-9]", value) or value in ["True", "False", "None"]:
                string = False
            if typ8 == "str" and string:
                value = str(value)
            elif typ8 == "str" and string == False:
                msg = "TypeError at line %s:\nDeclared wrong data type, %s is not string" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "char" and string and len(value) == 1:
                value = str(value)
            elif typ8 == "char" and string == False or typ8 == "char" and len(value) > 3:
                msg = "TypeError at line %s:\nDeclared wrong data type, %s is not char" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "int" and string == False and value not in ["True", "False", "None"]:
                try:
                    value = eval(value)
                    value = int(value)
                except NameError:
                    pass
            elif typ8 == "int" and string == True or typ8 == "int" and value in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not integer" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "float" and string == False and value not in ["True", "False", "None"]:
                try:
                    value = eval(value)
                    value = float(value)
                except NameError:
                    pass
            elif typ8 == "float" and string == True or typ8 == "float" and value in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not float" % (self.lines, value)
                self.error_message(msg, token_stream, token)
            if typ8 == "bool" and value in ["True", "False", "None"]:
                try:
                    value = bool(value)
                except NameError:
                    pass
            elif typ8 == "bool" and value not in ["True", "False", "None"]:
                msg = "TypeError at line %s:\nDeclared wrong data type, '%s' is not boolean" % (self.lines, value)
                self.error_message(msg, token_stream, token)
        # ---------------------------------------------------------
        if var_decl == False:
            ast['variable_declaration'].append({'value': value})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        for i in self.symbol_table:
            if i[1] == ast['variable_declaration'][1]['name']:
                # Change the declared variable's value to this one
                i[2] = ast['variable_declaration'][2]['value']
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def get_scope(self, token_stream):
        nesting_count = 1
        tokens_checked = 0
        scope_tokens = []
        for token in token_stream:
            tokens_checked += 1
            token_value = token[1]
            token_type = token[0]
            if token_type == "SCOPE_DEFINIER" and token_value == "{":
                nesting_count += 1
            elif token_type == "SCOPE_DEFINIER" and token_value == "}":
                nesting_count -= 1
            if nesting_count == 0:
                scope_tokens.append(token)
                break
            else:
                scope_tokens.append(token)
        return [scope_tokens, tokens_checked]

    def parse_scope(self, token_stream, statement_ast, astName, isNested, macros, match_case=False):
        ast = {'scope': []}
        tokens_checked = 0
        lines = 1
        while tokens_checked < len(token_stream):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if match_case:
                case = self.parse_case(token_stream[tokens_checked + 1:len(token_stream)])
                ast['scope'].append(case[0])
                tokens_checked += case[1]
            # If the token is the include keyword, hand the remaining tokens to parse_include()
            if token_type == "KEYWORD" and token_value == "include":
                include = self.parse_include(token_stream[tokens_checked:len(token_stream)])
                ast['scope'].append(include[0])
                tokens_checked += include[1]
            elif token_type == "DATATYPE":
                var = self.parse_decl_variable(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(var[0])
                tokens_checked += var[1]
            elif token_type == "IDENTIFIER" and (token_stream[tokens_checked + 1][1] == "=" or token_stream[tokens_checked + 1][0] == "INCREMENT_OPERATOR"):
                varx = self.parse_variable(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(varx[0])
                tokens_checked += varx[1]
            elif token_type == "BUILT_IN_FUNCTION":
                builtin = self.parse_builtin(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(builtin[0])
                tokens_checked += builtin[1]
            elif token_type == "MATH_FUNCTION":
                math = self.parse_math(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(math[0])
                tokens_checked += math[1]
            elif token_type == "KEYWORD" and token_value in ("if", "else", "elseif"):
                conditional = self.parse_conditional_statements(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(conditional[0])
                tokens_checked += conditional[1] - 1
            elif token_type == "KEYWORD" and token_value == "for":
                loop = self.parse_loop(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(loop[0])
                tokens_checked += loop[1]
            elif token_type == "KEYWORD" and token_value == "while":
                loop = self.parse_loop(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(loop[0])
                tokens_checked += loop[1]
            elif token_type == "KEYWORD" and token_value == "func":
                function = self.parse_func(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(function[0])
                tokens_checked += function[1]
            elif token_type == "KEYWORD" and token_value == "return":
                return_statement = self.parse_return(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(return_statement[0])
                tokens_checked += return_statement[1]
            elif token_type == "COMMENT" and token_value == r"\\":
                comment = self.parse_single_line_comment(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(comment[0])
                tokens_checked += comment[1]
            elif token_type == "COMMENT" and token_value == "|**":
                comment = self.parse_multi_line_comment(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(comment[0])
                tokens_checked += comment[1]
            elif macros == True and token_value == "define":
                define = self.parse_macros_define(token_stream[tokens_checked:len(token_stream)], True)
                ast['scope'].append(define[0])
                tokens_checked += define[1]
            try:  # peeking one token ahead raises IndexError on the last token
                if token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][0] == "COLON":
                    run = self.call_func(token_stream[tokens_checked:len(token_stream)], True)
                    ast['scope'].append(run[0])
                    tokens_checked += run[1]
            except:
                pass
            if token_type == "NEWLINE":
                self.lines += 1
            if token_value == "}":
                self.nesting_count += 1
            tokens_checked += 1
        self.token_index += self.nesting_count + 1
        self.lines -= 1
        statement_ast[astName].append(ast)
        if isNested == False:
            self.ast['main_scope'].append(statement_ast)

    def parse_builtin(self, token_stream, inScope):
        tokens_checked = 0
        value = ""
        ast = {'builtin_function': []}
        execute = False
        dots = False
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SEMIC": break
            if token == 0 and token_type == "BUILT_IN_FUNCTION":
                ast['builtin_function'].append({'function': token_value})
            elif token == 1 and token_type == "IDENTIFIER" and token_value not in constants:
                if token_stream[0][1] == "execute":
                    value = self.get_token_value(token_value)
                elif token_stream[0][1] == "input":
                    ast['builtin_function'].append({'type': self.get_token_type(token_value)})
                    value = str(token_value)
                else:
                    value = str(token_value)
            elif token == 1 and token_type == "IDENTIFIER" and token_value in constants:
                value = "constants['{}']".format(token_value)
            elif token == 1 and token_type not in ["IDENTIFIER", "FACTORIAL", "SQUARE_ROOT"]:
                value = token_value
            elif token == 1 and token_type == "FACTORIAL":
                math = MathModule()
                value = str(math.factorial(int(token_value)))
            elif token == 1 and token_type == "SQUARE_ROOT":
                if re.match("[a-z]", token_value) or re.match("[A-Z]", token_value):
                    token_value = str(self.get_token_value(token_value))
                if "Complex(" in token_value and ")" in token_value:
                    value = str(np.sqrt(token_value))
                else:
                    value = str(np.sqrt(float(token_value)))
            elif token > 1 and token_type == "ELLIPSIS_OPERATOR":
                value += str(token_value)
                dots = True
            elif token > 1 and token_type == "FACTORIAL":
                math = MathModule()
                value += str(math.factorial(int(token_value)))
            elif token > 1 and token_type not in ["FACTORIAL", "OPERATOR", "IDENTIFIER"]:
                value += str(token_value)
            elif token > 1 and token_type == "OPERATOR":
                value += str(token_value.replace('^', '**'))
            elif token > 1 and token_type == "IDENTIFIER" and token_value not in constants:
                if token_stream[0][1] == "execute":
                    value += self.get_token_value(token_value)
                else:
                    value += str(token_value)
            elif token > 1 and token_type == "IDENTIFIER" and token_value in constants:
                value += "constants['{}']".format(token_value)
            tokens_checked += 1
        if dots:
            value = str(self.get_tokens_range(value))
        if type(value) == int:
            value = int(value)
        elif type(value) == float:
            value = float(value)
        elif type(value) == complex:
            fmath = MathModule()
            value = fmath.complex(value)
        ast['builtin_function'].append({'argument': value})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def parse_return(self, token_stream, inScope):
        tokens_checked = 0
        value = ""
        ast = {'return': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SEMIC": break
            if token == 1 and token_type == "IDENTIFIER":
                value = token_value
            elif token == 1 and token_type == "IDENTIFIER" and token_stream[tokens_checked + 1][0] == "COLON":
                value = token_value
            elif token == 1 and token_type != "IDENTIFIER":
                value = token_value
            elif token == 1 and token_type == "FACTORIAL":
                math = MathModule()
                value = str(math.factorial(int(token_value)))
            elif token > 1 and token_type == "FACTORIAL":
                math = MathModule()
                value += str(math.factorial(int(token_value)))
            elif token > 1 and token_type != "FACTORIAL":
                value += token_value
            tokens_checked += 1
        if type(value) in [int, float]:
            try:
                value = eval(value)
            except:
                pass
        elif type(value) == float:
            value = float(value)
        elif type(value) == complex:
            try:
                value = complex(value)
            except:
                pass
        ast['return'].append({'argument': value})
        if inScope == False:
            self.ast['main_scope'].append(ast)
        self.token_index += tokens_checked
        return [ast, tokens_checked]

    def parse_conditional_statements(self, token_stream, isNested):
        tokens_checked = 0
        condition = ""
        els = False
        tokens = []
        ast = {'conditional_statement': []}
        for token in range(0, len(token_stream)):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{":
                break
            elif token == 0 and token_value == "if":
                ast['conditional_statement'].append({'keyword': token_value})
            elif token == 0 and token_value == "else":
                ast['conditional_statement'].append({'keyword': token_value})
                els = True
            elif token == 1 and token_type != "FACTORIAL":
                condition = token_value
            elif token == 1 and token_type == "FACTORIAL":
                math = MathModule()
                condition = str(math.factorial(int(token_value)))
            elif token > 1 and token_type == "FACTORIAL":
                math = MathModule()
                condition += str(math.factorial(int(token_value)))
            elif token > 1 and token_type != "FACTORIAL":
                condition += token_value.replace("mod", "%")
            tokens_checked += 1
        if els == False:
            ast['conditional_statement'].append({'condition': condition})
        self.token_index += tokens_checked
        scope_tokens = self.get_scope(token_stream[tokens_checked + 1:len(token_stream)])
        if isNested == False:
            self.parse_scope(scope_tokens[0], ast, 'conditional_statement', False, False)
        else:
            self.parse_scope(scope_tokens[0], ast, 'conditional_statement', True, False)
        tokens_checked += scope_tokens[1]
        return [ast, tokens_checked]

    def get_token_value(self, token):
        for variable in self.symbol_table:
            if variable[1] == token: return variable[2]

    def get_token_type(self, token):
        for variable in self.symbol_table:
            if variable[1] == token: return variable[0]

    def find_token_type(self, token):
        # int
        try:
            token = int(token)
            datatype = 'int'
        except:
            pass

    def get_tokens_range(self, value):
        amount = 0
        if "..." in value:
            value = value.split('...')
            amount = 1
        elif ".." in value:
            value = value.split('..')
            amount = 0
        arr = []
        try:
            value[0], value[1] = int(value[0]), int(value[1])
            for i in range(value[0], value[1] + amount):  # startValue to endValue
                arr.append(i)
        except:
            startValue, endValue = value[0].replace("'", "").replace('"', ''), value[1].replace("'", "").replace('"', '')
            for i in range(ord(startValue), ord(endValue) + amount):
                arr.append(chr(i))
        return arr

    def get_token_match(self, start_matcher, end_matcher, token_stream):
        tokens = []
        tokens_checked = 0
        for token in token_stream:
            tokens_checked += 1
            if token[1] == end_matcher:
                return [tokens, tokens_checked - 1]
            else:
                tokens.append(token)
        return False

    def parse_loop(self, token_stream, isNested):
        # for x :: x < 10 :: x++ {
        tokens_checked = 0
        keyword = ""
        condition = ""
        value = ""
        increment = ""
        var_decl = False
        ast = {'loop': []}
        while tokens_checked < len(token_stream):
            token_type = token_stream[tokens_checked][0]
            token_value = token_stream[tokens_checked][1]
            if token_type == "SCOPE_DEFINIER" and token_value == "{":
                break
            if tokens_checked == 0:
                ast['loop'].append({'keyword': token_value})
                keyword = token_value
            if tokens_checked == 1 and keyword == "for":
                tokens = self.get_token_match("::", "{", token_stream)
                inner_tokens = [i[1] for i in tokens[0]]
                if "in" in inner_tokens:
                    array = ""
                    data_type = self.get_token_type(inner_tokens[3])
                    ast['loop'].append({'name': inner_tokens[1]})
                    ast['loop'].append({'type': data_type})
                    ast['loop'].append({'array': ''.join(inner_tokens[3:])})
                    self.symbol_table.append([data_type, inner_tokens[1], inner_tokens[3:]])
                else:
                    if len([i for i, x in enumerate(inner_tokens) if x == "::"]) != 2:
                        self.error_message("SyntaxError:\nSymbol '::' is missing in a for loop", token_stream, tokens_checked)
                    inner_tokens[:] = [x for x in inner_tokens if x != '::']
                    ast['loop'].append({'name': inner_tokens[1]})
                    ast['loop'].append({'start_value': self.get_token_value(inner_tokens[2])})
                    ast['loop'].append({'end_value': inner_tokens[4]})
                    if "++" in inner_tokens[5]:
                        ast['loop'].append({'increment': "1"})
                    elif "--" in inner_tokens[5]:
                        ast['loop'].append({'increment': "-1"})
                tokens_checked += tokens[1]
                break
            elif keyword == "while":
                if tokens_checked == 1: condition = token_value
                elif tokens_checked == 2 and token_type != "FACTORIAL":
                    condition += token_value
                elif tokens_checked == 2 and token_type == "FACTORIAL":
                    math = MathModule()
                    condition = str(math.factorial(int(token_value)))
...
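The `..`/`...` range syntax handled above is expanded eagerly: once an ELLIPSIS_OPERATOR token has been folded into `value`, `get_tokens_range` splits the string on the dots and materializes either a numeric or a character range, where `...` includes the end value and `..` excludes it. Below is a minimal standalone sketch of that same logic; `expand_range` is a hypothetical name, and the real code lives as a method on the parser class:

def expand_range(value):
    # "a...b" keeps the end value, "a..b" drops it
    if "..." in value:
        parts, amount = value.split("..."), 1
    else:
        parts, amount = value.split(".."), 0
    try:
        return list(range(int(parts[0]), int(parts[1]) + amount))
    except ValueError:
        # character ranges: strip the quotes, then walk the code points
        start = parts[0].replace("'", "").replace('"', '')
        end = parts[1].replace("'", "").replace('"', '')
        return [chr(i) for i in range(ord(start), ord(end) + amount)]

print(expand_range("1...5"))     # [1, 2, 3, 4, 5]
print(expand_range("'a'..'e'"))  # ['a', 'b', 'c', 'd']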
block.py
Source:block.py
1import re2#import ply.lex as lex3def loadSymbolTable():4	5	symbolTable["keyword"] = keyword6	symbolTable["dataType"] = dataType7	symbolTable["preDefRoutine"] = preDefRoutine8lb = 09fp = 110def validLexeme(string):11	12	res = False13	if(string in keyword):14		#print("key " + string + "\n")15		res = "keyword"16	elif(string in dataType):17		#print("dataType " + string + "\n")18		res = "dataType"19	elif(string in preDefRoutine):20		res = "preDefRoutine"21	elif(re.match(identifier, string)):22		#print("id " + string + "\n")23		res = "identifier"24	elif(re.match(punctuator, string)):25		#print("punc " + string)26		res = "punctuator"27	elif(re.match(number, string)):28		res = "number"29	elif(re.match(aritmeticOperator, string)):30		res = "arithmeticOperator"31	elif(re.match(assignmentOperator, string)):32		res = "assignmentOperator"33	elif(string in relationalOperator):34		res = "relationalOperator"35	elif(string in logicalOperator):36		res = "logicalOperator"37	elif(string == "#"):38		res = "hashOperator"39	elif(string == ".h"):40		res = "headerExtension"41	elif(string == "true" or string == "false"):42		res = "boolean"43	elif(string == "++"):44		res = "incrementOperator"45	elif(string == "--"):46		res = "decrementOperator"47	return res48top = 0;49i_ = 1;50tmp = "";51li = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]52def push(val):53	global top,li54	top = top+155	li[top]=val;56	57def codegen():58	global tmp,i_,top,li59	tmp = "t"60	tmp+=str(i_)61	print(tmp +" = "+str(li[top-2]), str(li[top-1]), str(li[top]));62	top-=2;63	li[top]=tmp64	i_=i_+1;65def codegen_umin():66	global tmp,i_,top,li67	tmp = "t"68	tmp+=str(i_)69	print(tmp+" = -"+str(li[top]));70	top=top-1;71	li[top]=tmp;72	i_=i_+1;73def codegen_assign():74	global tmp,i_,top,li75	print(str(li[top-1])+" = "+str(li[top]));76	top=top-2;77label = 178def lab1():79	global label80	print("L"+str(label)+":")81	label = label+182	83def lab2():84	global tmp,i_,top,li,label85	tmp = "t"86	tmp+=str(i_)87	print(tmp+" =  "+li[top-2],li[top-1],li[top]);88	print("if "+tmp+" goto L"+str(label-1));89	i_=i_+1;90	label = label-1;91	top = top-3;92def lexer():93	global lb94	global fp95	96	lexeme = prg[lb:fp]97	98	while(re.match(spaces, lexeme)):99		#print("x " + lexeme + "\n")100		lb = lb + 1101		fp = fp + 1102		lexeme = prg[lb:fp]103	104	#if(re.match(spaces, prg[105	#print("lexeme: " + lexeme + " type: " + str(type(lexeme)) + "\n");106	res = validLexeme(lexeme)107	while((not res) and (fp <= len(prg))):108		#print("lexeme1: " + lexeme + "\n")109		fp = fp + 1110		lexeme = prg[lb:fp]111		res = validLexeme(lexeme)112	113	#print(lexeme + "\n")114	tokenType = res115	res = validLexeme(lexeme)116	while((res) and (fp <= len(prg))):117		#print("lexeme2: " + lexeme + "\n")118		fp = fp + 1119		lexeme = prg[lb:fp]120		tokenType = res121		res = validLexeme(lexeme)122	123	lexeme = prg[lb:fp - 1]124	lb = fp - 1125	126	if((tokenType != False) and (tokenType not in symbolTable)):127		symbolTable[tokenType] = list()128		129	if((tokenType != False) and lexeme not in symbolTable[tokenType]):130		symbolTable[tokenType].append(lexeme.strip())131	132	#print("TOKEN: " + str(lexeme) + " TYPE: " + str(tokenType) + "\n");133	#print(str(lb) + " " + str(fp) + "\n")134	#print(str(len(prg)))135	return dict({tokenType:lexeme})136def parse_start():137	status = program()138	139	print("SUCCESSFUL PARSING\n") if(status == 0) else print("FAILED PARSING\n")140	141def program():142	status = preProcessorDirective()143	144	if(status == 0):145		status = externDeclaration()146		147		
if(status == 0):148			status = mainFunction()149	150	return status151def preProcessorDirective():152	status = 0153	token = lexer()154	155	token_type = list(token.keys())[0]156	token_value = list(token.values())[0]157	158	if(token_type == "hashOperator"):159		160		token = lexer()161		token_type = list(token.keys())[0]162		token_value = list(token.values())[0]163		164		if(token_type == "keyword" and token_value == "include"):165				166			token = lexer()167			token_type = list(token.keys())[0]168			token_value = list(token.values())[0]169			170			if(token_type == "relationalOperator" and token_value == "<"):171				172				token = lexer()173				token_type = list(token.keys())[0]174				token_value = list(token.values())[0]175				176				if(token_type == "identifier"):177					178					token = lexer()179					token_type = list(token.keys())[0]180					token_value = list(token.values())[0]181					182					183					if(token_type == "headerExtension"):184					185						token = lexer()186						token_type = list(token.keys())[0]187						token_value = list(token.values())[0]	188					189						if(token_type == "relationalOperator" and token_value == ">"):190					191								status = preProcessorDirective()192								#print(str(status) + " after return\n")193							194						else:195							print("Syntax error: expected '>' but received " + str(token_value) + "\n")196							status = 1197					else:198						print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")199						status = 1200						201				else:202					print("Syntax error: expected 'Identifer' but received " + str(token_value) + "\n")203					status = 1204			else:	205				print("Syntax error: expected '<' but received " + str(token_value) + "\n")206				status = 1207				208		elif(token_type == "keyword" and token_value == "define"):209			210			211			token = lexer()212			token_type = list(token.keys())[0]213			token_value = list(token.values())[0]214			215			if(token_type == "identifier"):216				217				variableName = token_value218				token = lexer()219				token_type = list(token.keys())[0]220				token_value = list(token.values())[0]221				222				if(token_type == "number"):223					224					variableValue = int(token_value.strip())225					symbolTable[variableName] = variableValue226					status = preProcessorDirective()227					228					229				else:230					print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")231					status = 1232			else:233				print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")234				status = 1235					236		else:237			print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")238			status = 1239	else:240		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED241		global lb, fp242		lb = lb - len(token_value)243		fp = fp - len(token_value)244		245	return status246	#print("Token key: " + str((token_type) + " values: " + str(token_value) + "\n"))	247def externDeclaration():248	249	250	status = 0251	token = lexer()252	token_type = list(token.keys())[0]253	token_value = list(token.values())[0]254	if(token_type == "keyword" and token_value == "extern"):255		status = declarationStatement()256		if(status == 0):257		258			token = lexer()259			token_type = list(token.keys())[0]260			token_value = list(token.values())[0].strip()261			if(not (token_type == "punctuator" and token_value == ";")):262				print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")263				status = 1264	else:265		#RESET POINTERS SINCE 
A WRONG TOKEN WAS OBTAINED266		global lb, fp267		lb = lb - len(token_value)268		fp = fp - len(token_value)	269	return status270def declarationStatement():271	272	status = 0273	token = lexer()274	token_type = list(token.keys())[0]275	token_value = list(token.values())[0]276	if(token_type == 'dataType'):277		278		dataType = token_value.strip()279		status = variable(dataType)280		281	else:282		print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")283		status = 1284	285	return status286	287def optionalDeclarationStatement():288	289	#print("IN OPTDECL")290	status = 0291	token = lexer()292	token_type = list(token.keys())[0]293	token_value = list(token.values())[0]294	#print("before reset: " + str(token_value))295	if(token_type == 'dataType'):296	297		298		dataType = token_value.strip()299		status = variable(dataType)300		301	else:302	303		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED304		#print("resetting")305		global lb, fp306		lb = lb - len(token_value)307		fp = fp - len(token_value)308		status = 2309		"""310		if(token_value != "do"):311			token = lexer()312			token_type = list(token.keys())[0]313			token_value = list(token.values())[0]314		"""315		#print("after reset: " + str(token_value))316	return status317	318	319def variable(dataType):320	status = 0321	token = lexer()322	token_type = list(token.keys())[0]323	token_value = list(token.values())[0]324	325	if(token_type == 'identifier'):326		327		#print("received identifier, " + str(token_value))328		variableName = token_value.strip()329		330		if(dataType not in externalVariables):331			externalVariables[dataType] = list()332		333		if(variableName not in externalVariables[dataType]):334			externalVariables[dataType].append(variableName)335		else:336			print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")337			status = 1338		#externalVariables.append([variableName, dataType])339		if(status==0):340			status = variableDash(dataType)341	else:342		print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")343		status = 1344	345	return status346def variableDash(dataType):347	status = 0348	token = lexer()349	token_type = list(token.keys())[0]350	token_value = list(token.values())[0]351	352	if(token_type == 'punctuator' and token_value == ','):353		354		token = lexer()355		token_type = list(token.keys())[0]356		token_value = list(token.values())[0]357	358		if(token_type == 'identifier'):359			360			variableName = token_value.strip()361			if(dataType not in externalVariables):362				externalVariables[dataType] = list() 363		364			if(variableName not in externalVariables[dataType]):365				externalVariables[dataType].append(variableName)366			else:367				print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")368				status = 1369			if(status==0):370				variableDash(dataType)371		372		else:373			print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")374			status = 1375	else:376		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED377		global lb, fp378		#print(token_value)379		#print(str(lb) + " " + str(fp))380		lb = lb - len(token_value)381		fp = fp - len(token_value)382		#print(str(lb) + " " + str(fp))383	return status384	385def mainFunction():386	status = 0387	token = lexer()388	token_type = list(token.keys())[0]389	token_value = list(token.values())[0]390	391	if(token_type == "dataType" and token_value == "int"):392		393		status = mainDash()394		395	else:396		
print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")397		status = 1398	399	return status400	401	402def mainDash():403	status = 0404	token = lexer()405	token_type = list(token.keys())[0]406	token_value = list(token.values())[0].strip()407	408	#print(str(token_type) + " " + str(token_value))409	410	if(token_type == "identifier" and token_value == "main"):411	412		token = lexer()413		token_type = list(token.keys())[0]414		token_value = list(token.values())[0].strip()415		416		if(token_type == "punctuator" and token_value == "("):417		418			token = lexer()419			token_type = list(token.keys())[0]420			token_value = list(token.values())[0].strip()421			422			if(token_type == "punctuator" and token_value == ")"):423			424				token = lexer()425				token_type = list(token.keys())[0]426				token_value = list(token.values())[0].strip()427				428				if(token_type == "punctuator" and token_value == "{"):429				430					status = statements()431					432					if(status == 0):433						434						token = lexer()435						token_type = list(token.keys())[0]436						token_value = list(token.values())[0].strip()437						#print(token_value + str(len(token_value)))438						if(not(token_type == "punctuator" and token_value == "}")):439							print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")440							status = 1441				else:442					print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")443					status = 1444						445				446			447			elif(token_type == "dataType" and token_value == "int"):448			449				token = lexer()450				token_type = list(token.keys())[0]451				token_value = list(token.values())[0].strip()452				453				if(token_type == "identifier" and token_value == "argc"):454				455					token = lexer()456					token_type = list(token.keys())[0].strip()457					token_value = list(token.values())[0].strip()458					459					if(token_type == "punctuator" and token_value == ","):460				461						token = lexer()462						token_type = list(token.keys())[0]463						token_value = list(token.values())[0].strip()464						465						if(token_type == "dataType" and token_value == "char"):466				467							token = lexer()468							token_type = list(token.keys())[0]469							token_value = list(token.values())[0].strip()470							471							if(token_type == "arithmeticOperator" and token_value == "*"):472				473								token = lexer()474								token_type = list(token.keys())[0]475								token_value = list(token.values())[0]	.strip()476								477								if(token_type == "identifier" and token_value == "argv"):478				479									token = lexer()480									token_type = list(token.keys())[0]481									token_value = list(token.values())[0].strip()482									483									if(token_type == "punctuator" and token_value == "["):484				485										token = lexer()486										token_type = list(token.keys())[0]487										token_value = list(token.values())[0].strip()488										489										if(token_type == "punctuator" and token_value == "]"):490				491											token = lexer()492											token_type = list(token.keys())[0]493											token_value = list(token.values())[0].strip()494											495											if(token_type == "punctuator" and token_value == ")"):496				497												token = lexer()498												token_type = list(token.keys())[0]499												token_value = list(token.values())[0].strip()500											501												if(token_type == "punctuator" and token_value == "{"):502				503										
			status = statements()504					505													if(status == 0):506						507														token = lexer()508														token_type = list(token.keys())[0]509														token_value = list(token.values())[0].strip()510				511														if(not(token_type == "punctuator" and token_value == "}")):512															print("Syntax error: expected 'Punctuator2 close curly bracket' ", end = "")513															print("but received " + str(token_value) + "\n")514															status = 1515												else:516													print("Syntax error: expected 'Punctuator open curly bracket'  ", end = "")517													print("but received " + str(token_value) + "\n")518													status = 1519											520											else:521												print("Syntax error: expected 'Punctuator close round bracket' but received ", end = "")522												print(str(token_value) + "\n")523												status = 1524											525										else:526											print("Syntax error: expected 'Punctuator close square bracket' but received ", end = "")527											print(str(token_value) + "\n")528											status = 1529									else:530										print("Syntax error: expected 'Punctuator open square bracket' but received ", end = "")531										print(str(token_value) + "\n")532										status = 1533									534								else:535									print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")536									status = 1537									538							else:539								print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")540								status = 1541							542						else:543							print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")544							status = 1545						546					else:547						print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")548						status = 1	549				550				else:551					print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")552					status = 1553				554			555			else:556				print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")557				status = 1558				559		else:560			print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")561			status = 1562	563	else:564		print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")565		status = 1566		567	return status568data = {}569def statements():570	571	572	#print("top of statements\n")573	status = 0574	status = initializationStatement()575	576	if(status == 0):577		#print("init success")578		token = lexer()579		token_type = list(token.keys())[0]580		token_value = list(token.values())[0]581		#print(token_value +" new value")582		tv = token_value.strip()583		if(token_type == "punctuator" and tv == ";"):584			status = statements()585		else:586			print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")587			status = 1588			589			590	else:591		'''	token = lexer()592		token_type = list(token.keys())[0]593		token_value = list(token.values())[0]594		tv = token_value.strip()'''595		#print("dc" + " " + tv)596		597		598		status = optionalDeclarationStatement()599		#print(status)600		if(status == 0):	601			#print("decl success")602			603			token = lexer()604			token_type = list(token.keys())[0]605			token_value = list(token.values())[0]606			tv = token_value.strip()607			if(token_type == "punctuator" and tv == ";"):608				609				status = statements()610			else:611				
print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")612				status = 1613		else:614			615			status = assignmentStatement()616			if(status == 0):617				#print("assgn success")618				619				token = lexer()620				token_type = list(token.keys())[0]621				token_value = list(token.values())[0]622				tv = token_value.strip()623				if(token_type == "punctuator" and tv == ";"):624					status = statements()625				else:626					print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")627					status = 1628			else:629				630				status = 0631				token = lexer()632				token_type = list(token.keys())[0]633				token_value = list(token.values())[0]634				#print("IN statements: " + token_value)635				if(token_type == "keyword" and token_value == "do"):636					#print("Do")637					token = lexer()638					token_type = list(token.keys())[0]639					token_value = list(token.values())[0].strip()640					lab1()641					if(token_type == "punctuator" and token_value == "{"):642						#print("{")643						status = statements()644					645						#print("status: " + str(status))646						if(status == 0):647					648							token = lexer()649							token_type = list(token.keys())[0]650							token_value = list(token.values())[0].strip()651							#print(token_value)652							if(token_type == "punctuator" and token_value == "}"):653								#print("}")654								token = lexer()655								token_type = list(token.keys())[0]656								token_value = list(token.values())[0].strip()657		658								if(token_type == "keyword" and token_value == "while"):659									#print("while")660									token = lexer()661									token_type = list(token.keys())[0]662									token_value = list(token.values())[0].strip()663		664									if(token_type == "punctuator" and token_value == "("):665										#print("(")666										status = condition()667										lab2()668										if(status == 0):669					670											token = lexer()671											token_type = list(token.keys())[0]672											token_value = list(token.values())[0].strip()673		674											if(token_type == "punctuator" and token_value == ")"):675												#print(")")676												token = lexer()677												token_type = list(token.keys())[0]678												token_value = list(token.values())[0].strip()679		680												if(token_type == "punctuator" and token_value == ";"):681													#print("in statements: " + token_value + "\n")682													status = statements()683					684												else:685													print("Syntax error: expected 'Punctuator semicolon5' ", end = "")686													print("but received " + str(token_value) + "\n")687													status = 1688					689											else:690												print("Syntax error: expected 'Punctuator close round bracket' ", end = "")691												print("but received " + str(token_value) + "\n")692												status = 1693					694									else:695										print("Syntax error: expected 'Punctuator open round bracket' ", end = "") 696										print("but received " + str(token_value) + "\n")697										status = 1698					699								else:700									print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")701									status = 1702					703							else:704								print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")705								status = 1706					elif(token_type == "identifier" or token_type == "datatype"):707						global lb, fp708						#print(token_value)709						#print(str(lb) + " " + 
						lb = lb - len(token_value)
						fp = fp - len(token_value)
						status = statement1()
					else:
						print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
						status = 1

				else:
					status = 0
					tv = token_value.strip()
					if(tv == "{"):
						status = statements()
						if(status == 0):
							token = lexer()
							token_type = list(token.keys())[0]
							token_value = list(token.values())[0].strip()
							if(token_type == "punctuator" and token_value == "}"):
								status = statements()
							else:
								print("Syntax error: expected 'Punctuator close curly bracket' but received " + str(token_value) + "\n")
								status = 1
					else:
						#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
						lb = lb - len(token_value)
						fp = fp - len(token_value)

	return status

def statement1():
	status = 0
	status = initializationStatement()
	if(status == 0):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tv = token_value.strip()
		if(token_type == "punctuator" and tv == ";"):
			status = 0
		else:
			status = 1
			print("Syntax error: expected 'Punctuator semicolon' but received " + str(token_value) + "\n")
	else:
		status = assignmentStatement()
		if(status == 0):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0]
			tv = token_value.strip()
			if(token_type == "punctuator" and tv == ";"):
				status = 0
			else:
				status = 1
				print("Syntax error: expected 'Punctuator semicolon' but received " + str(token_value) + "\n")
	if(status == 0):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0].strip()
		if(token_type == "keyword" and token_value == "while"):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0].strip()
			if(token_type == "punctuator" and token_value == "("):
				status = condition()
				lab2()
				if(status == 0):
					token = lexer()
					token_type = list(token.keys())[0]
					token_value = list(token.values())[0].strip()
					if(token_type == "punctuator" and token_value == ")"):
						token = lexer()
						token_type = list(token.keys())[0]
						token_value = list(token.values())[0].strip()
						if(token_type == "punctuator" and token_value == ";"):
							status = statements()
						else:
							print("Syntax error: expected 'Punctuator semicolon5' ", end = "")
							print("but received " + str(token_value) + "\n")
							status = 1
					else:
						print("Syntax error: expected 'Punctuator close round bracket' ", end = "")
						print("but received " + str(token_value) + "\n")
						status = 1
			else:
				print("Syntax error: expected 'Punctuator open round bracket' ", end = "")
				print("but received " + str(token_value) + "\n")
				status = 1
		else:
			print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")
			status = 1
	else:
		print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")
		status = 1
	return status

def initializationStatement():
	status = 0
	global lb, fp
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == "dataType"):
		if(token_value not in data):
			data[token_value] = {}
		status = initStat(token_value)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def initStat(dt):
	status = multipleInitialization(dt)
	# (an earlier single-variable version of this routine was left here,
	#  commented out, in the original source)
	return status

def multipleInitialization(dt):
	global data
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tk = token_value
	if(token_type == "identifier"):
		push(tk)
		if(token_value not in data[dt]):
			# give the fresh variable a type-appropriate default value
			if(dt == "int"):
				data[dt][token_value] = int(0)
			elif(dt == "char"):
				data[dt][token_value] = str(0)	# was string(0), a NameError
			elif(dt == "float"):
				data[dt][token_value] = float(0)
			elif(dt == "double"):
				data[dt][token_value] = float(0)
			else:
				data[dt][token_value] = 0
		else:
			print("Syntax Error: The variable has already been initialized\n")
			return 1
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tv = token_value.strip()
		if(tv == ";"):
			global lb, fp
			lb = lb - len(token_value)
			fp = fp - len(token_value)
			return 0
		elif(token_type == "assignmentOperator" and tv == "="):
			status = E(dt, tk)
			codegen_assign()
			if(status == 0):
				status = multinit(dt)
				if(status == 2):
					status = 0
		elif(token_type == "punctuator" and tv == ","):
			status = multipleInitialization(dt)
		else:
			print("Syntax error: expected 'Assignment2 Operator' but received " + str(tv) + "\n")
			status = 1
	else:
		print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")	# was str(tv): tv is unbound on this path
		status = 1
	return status

def multinit(dt):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "punctuator" and tv == ","):
		status = multipleInitialization(dt)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def assignmentStatement():
	global data
	dty = ''
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tk = token_value
	if(token_type == "identifier"):
		push(tk)
		# recover the declared type of the target variable
		for i in data:
			for j in data[i]:
				if(j == token_value):
					dty = i
		if(dty == ''):
			print("The variable " + token_value + " has not been initialized.")
			return 1
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		if(token_type == "assignmentOperator" and token_value == "="):
			status = E(dty, tk)
			codegen_assign()
		else:
			print("Syntax error: expected 'Assignment3 Operator' but received " + str(token_value) + "\n")
			status = 1
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def condition():
	status = 0
	status = C()
	return status

def C():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "identifier" or token_type == "number"):
		push(tv)
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tk = token_value.strip()
		if(token_type == "relationalOperator" or token_type == "logicalOperator"):
			push(tk)
			status = C()
		elif(token_value == ")"):
			global lb, fp
			lb = lb - len(token_value)
			fp = fp - len(token_value)
			return 0
		else:
			return 1
	elif(not (token_type == "boolean")):
		print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")
		status = 1
	return status

op = ""
def E(dt, vn):
	status = F(dt, vn)
	if(status == 0):
		status = E1(dt, vn)
	return status

def E1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "+"):
		op = "+"
		push(tv)
		status = F(dt, vn)
		codegen()
		if(status == 0):
			status = E1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
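		# A non-matching token is "pushed back" by rewinding the lexer's
		# window (lb, fp) by len(token_value). lexer() always leaves
		# lb == fp - 1 just past the lexeme, so this restores the exact
		# position the token was read from and the caller reads it again;
		# E1 then returns 0, i.e. it matched the empty production.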
	return status

def F(dt, vn):
	status = 0
	status = G(dt, vn)
	if(status == 0):
		status = F1(dt, vn)
	return status

def F1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "-"):
		op = "-"
		push(tv)
		status = G(dt, vn)
		codegen()
		if(status == 0):
			status = F1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def G(dt, vn):
	status = 0
	status = H(dt, vn)
	if(status == 0):
		status = G1(dt, vn)
	return status

def G1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "*"):
		push(tv)
		op = "*"
		status = H(dt, vn)
		codegen()
		if(status == 0):
			status = G1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def H(dt, vn):
	status = 0
	status = I(dt, vn)
	if(status == 0):
		status = H1(dt, vn)
	return status

def H1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "arithmeticOperator" and tv == "/"):
		global op
		op = "d"
		push(tv)
		status = I(dt, vn)
		codegen()
		if(status == 0):
			status = H1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def I(dt, vn):
	global data
	status = 0
	chk = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "arithmeticOperator" and tv == "-"):
		chk = 1
		push(tv)
		status = I(dt, vn)	# was I(): the recursive call dropped its arguments
		codegen_umin()
	elif(not(token_type == "identifier" or token_type == "number")):
		print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")
		status = 1
		return status
	if(token_type == "identifier" or token_type == "number"):
		push(tv)
	global op
	g = True
	if(token_type == "identifier"):	# was token_value == "identifier", which never matched
		if(token_value not in data[dt]):
			print("Syntax error: The variable " + token_value + " not in " + dt)
			g = False
	elif(token_type == "number"):
		# was isinstance(token_value, dt): dt is a type-name string, so the
		# original check always failed; numbers lexed by ^\d+$ are integral
		# literals, so only numeric dataTypes accept them.
		if(dt not in ("int", "short", "long", "float", "double")):
			print("Syntax error: The variable belongs to a different type")
			g = False
	if(op == "" and g == True):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] = -1 * data[dt][token_value]
				chk = 0
			else:
				data[dt][vn] = data[dt][token_value]
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] = -1 * float(token_value)
				chk = 0
			else:
				data[dt][vn] = float(token_value)
	elif(op == "d" and g == True):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] /= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] /= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] /= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] /= float(token_value)
				op = ""
	elif(op == "*" and g == True):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] *= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] *= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] *= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] *= float(token_value)
				op = ""
	elif(op == "-" and g == True):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] -= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] -= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] -= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] -= float(token_value)
				op = ""
	elif(op == "+" and g == True):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] += -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] += data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] += -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] += float(token_value)
				op = ""
	return status

prg = open("nocomments.c").read()
symbolTable = dict()
externalVariables = dict()
localVariables = list()
keyword = ["include", "define", "while", "do", "for", "return", "extern"]
dataType = ["void", "int", "short", "long", "char", "float", "double"]
preDefRoutine = ["printf", "scanf"]
#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]
identifier = r"^[^\d\W]\w*\Z"
punctuator = r"^[()[\]{};.,]$"
aritmeticOperator = r"^[-+*/]$"
assignmentOperator = r"^=$"
relationalOperator = ["<", ">", "<=", ">=", "==", "!="]
logicalOperator = ["&&", "||", "!"]
number = r"^\d+$"
spaces = r"[ \n\t]"	# was "[' ''\n''\t']", which also matched quote characters
loadSymbolTable()
parse_start()
'''
for i in data:
	for j in data[i]:
		print(i+" "+j+" "+str(data[i][j]))
'''
"""
PARSER ERROR CODES:
0-SUCCESS
1-FAILURE
"""
...

syn.py
Source:syn.py  
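For orientation before the listing: syn.py is a variant of the parser above whose codegen emits parse-tree NODE edges instead of three-address code. A minimal, hypothetical driver script (the file name nocomments.c is the one the listing reads; the sample program and the module name syn are assumptions for illustration):

sample = """#include <stdio.h>
extern int y;
int main()
{
	int a, b;
	a = 1 + 2 * 3;
	do { b = a - 1; } while ( b > 0 ) ;
}
"""
with open("nocomments.c", "w") as f:
	f.write(sample)

import syn	# parsing runs at import time and prints NODE/edge lines,
		# then SUCCESSFUL PARSING or FAILED PARSING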
import re
#import ply.lex as lex

def loadSymbolTable():
	symbolTable["keyword"] = keyword
	symbolTable["dataType"] = dataType
	symbolTable["preDefRoutine"] = preDefRoutine

lb = 0
fp = 1

def validLexeme(string):
	res = False
	if(string in keyword):
		res = "keyword"
	elif(string in dataType):
		res = "dataType"
	elif(string in preDefRoutine):
		res = "preDefRoutine"
	elif(re.match(identifier, string)):
		res = "identifier"
	elif(re.match(punctuator, string)):
		res = "punctuator"
	elif(re.match(number, string)):
		res = "number"
	elif(re.match(aritmeticOperator, string)):
		res = "arithmeticOperator"
	elif(re.match(assignmentOperator, string)):
		res = "assignmentOperator"
	elif(string in relationalOperator):
		res = "relationalOperator"
	elif(string in logicalOperator):
		res = "logicalOperator"
	elif(string == "#"):
		res = "hashOperator"
	elif(string == ".h"):
		res = "headerExtension"
	elif(string == "true" or string == "false"):
		res = "boolean"
	elif(string == "++"):
		res = "incrementOperator"
	elif(string == "--"):
		res = "decrementOperator"
	return res

top = 0
i_ = 1
tmp = ""
do = 0
li = [0] * 34

def push(val):
	global top, li
	top = top + 1
	li[top] = val

def codegen():
	global tmp, i_, top, li
	tmp = "N" + str(i_)
	print("NODE " + tmp + " -> " + str(li[top-2]) + " <-- " + str(li[top-1]) + " --> " + str(li[top]))
	top -= 2
	li[top] = tmp
	i_ = i_ + 1

def codegen_umin():
	global tmp, i_, top, li
	tmp = "t" + str(i_)
	print(tmp + " = -" + str(li[top]))
	top = top - 1
	li[top] = tmp
	i_ = i_ + 1

def codegen_assign():
	global tmp, i_, top, li
	print(str(li[top-1]) + " <-- = --> " + str(li[top]))
	if(do != 0):
		print("do --> =")
	else:
		print("main --> =")
	top = top - 2

label = 1
def lab1():
	global label
	#print("L"+str(label)+":")	# label printing is disabled in this variant
	label = label + 1

wh = ["dd"]
def lab2():
	global tmp, i_, top, li, label, wh
	tmp = "N" + str(i_)
	print("NODE " + tmp + " -> " + str(li[top-2]) + " <-- " + str(li[top-1]) + " --> " + str(li[top]))
	#print("if "+tmp+" goto L"+str(label-1))
	i_ = i_ + 1
	wh[0] = tmp	# remember the condition node for the "while -->" edge
	label = label - 1
	top = top - 3

def lexer():
	global lb
	global fp

	lexeme = prg[lb:fp]
	# skip leading whitespace
	while(re.match(spaces, lexeme)):
		lb = lb + 1
		fp = fp + 1
		lexeme = prg[lb:fp]

	# grow the lexeme until it first matches some token class...
	res = validLexeme(lexeme)
	while((not res) and (fp <= len(prg))):
		fp = fp + 1
		lexeme = prg[lb:fp]
		res = validLexeme(lexeme)

	# ...then keep growing while it still matches (maximal munch)
	tokenType = res
	res = validLexeme(lexeme)
	while((res) and (fp <= len(prg))):
		fp = fp + 1
		lexeme = prg[lb:fp]
		tokenType = res
		res = validLexeme(lexeme)

	lexeme = prg[lb:fp - 1]
	lb = fp - 1

	if((tokenType != False) and (tokenType not in symbolTable)):
		symbolTable[tokenType] = list()
	if((tokenType != False) and lexeme not in symbolTable[tokenType]):
		symbolTable[tokenType].append(lexeme.strip())

	return dict({tokenType:lexeme})

def parse_start():
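	# Top-down pipeline: program -> preProcessorDirective, externDeclaration,
	# mainFunction; mainDash then hands the body to statements(), which
	# handles declarations, assignments, do-while loops and nested { } blocks.
	# Status convention: 0 = success, 1 = hard error, 2 = "not my production,
	# token pushed back" (see the pointer resets below).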
	status = program()
	print("SUCCESSFUL PARSING\n") if(status == 0) else print("FAILED PARSING\n")

def program():
	status = preProcessorDirective()
	if(status == 0):
		status = externDeclaration()
		if(status == 0):
			status = mainFunction()
	return status

def preProcessorDirective():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]

	if(token_type == "hashOperator"):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]

		if(token_type == "keyword" and token_value == "include"):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0]

			if(token_type == "relationalOperator" and token_value == "<"):
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0]

				if(token_type == "identifier"):
					token = lexer()
					token_type = list(token.keys())[0]
					token_value = list(token.values())[0]

					if(token_type == "headerExtension"):
						token = lexer()
						token_type = list(token.keys())[0]
						token_value = list(token.values())[0]

						if(token_type == "relationalOperator" and token_value == ">"):
							status = preProcessorDirective()
						else:
							print("Syntax error: expected '>' but received " + str(token_value) + "\n")
							status = 1
					else:
						print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")
						status = 1
				else:
					print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
					status = 1
			else:
				print("Syntax error: expected '<' but received " + str(token_value) + "\n")
				status = 1

		elif(token_type == "keyword" and token_value == "define"):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0]

			if(token_type == "identifier"):
				variableName = token_value
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0]

				if(token_type == "number"):
					variableValue = int(token_value.strip())
					symbolTable[variableName] = variableValue
					status = preProcessorDirective()
				else:
					print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")
					status = 1
			else:
				print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
				status = 1
		else:
			print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")
			status = 1
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)

	return status

def externDeclaration():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == "keyword" and token_value == "extern"):
		status = declarationStatement()
		if(status == 0):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0].strip()
			if(not (token_type == "punctuator" and token_value == ";")):
				print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")
				status = 1
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def declarationStatement():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == 'dataType'):
		dataType = token_value.strip()
		status = variable(dataType)
	else:
		print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")
		status = 1
	return status

def optionalDeclarationStatement():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == 'dataType'):
		dataType = token_value.strip()
		status = variable(dataType)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def variable(dataType):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == 'identifier'):
		variableName = token_value.strip()
		if(dataType not in externalVariables):
			externalVariables[dataType] = list()
		if(variableName not in externalVariables[dataType]):
			externalVariables[dataType].append(variableName)
		else:
			print("Syntax error: The variable " + str(token_value) + " of type " + token_type + " has already been initialized.\n")
			status = 1
		if(status == 0):
			status = variableDash(dataType)
	else:
		print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
		status = 1
	return status

def variableDash(dataType):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == 'punctuator' and token_value == ','):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		if(token_type == 'identifier'):
			variableName = token_value.strip()
			if(dataType not in externalVariables):
				externalVariables[dataType] = list()
			if(variableName not in externalVariables[dataType]):
				externalVariables[dataType].append(variableName)
			else:
				print("Syntax error: The variable " + str(token_value) + " of type " + token_type + " has already been initialized.\n")
				status = 1
			if(status == 0):
				status = variableDash(dataType)	# was a bare call whose result was dropped
		else:
			print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
			status = 1
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def mainFunction():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == "dataType" and token_value == "int"):
		status = mainDash()
	else:
		print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")
		status = 1
	return status

def mainDash():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0].strip()
	if(token_type == "identifier" and token_value == "main"):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0].strip()
		if(token_type == "punctuator" and token_value == "("):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0].strip()
			if(token_type == "punctuator" and token_value == ")"):
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0].strip()
				if(token_type == "punctuator" and token_value == "{"):
					status = statements()
					if(status == 0):
						token = lexer()
						token_type = list(token.keys())[0]
						token_value = list(token.values())[0].strip()
						if(not(token_type == "punctuator" and token_value == "}")):
							print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")
							status = 1
				else:
					print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
					status = 1
			elif(token_type == "dataType" and token_value == "int"):
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0].strip()
				if(token_type == "identifier" and token_value == "argc"):
					token = lexer()
					token_type = list(token.keys())[0]
					token_value = list(token.values())[0].strip()
					if(token_type == "punctuator" and token_value == ","):
						token = lexer()
						token_type = list(token.keys())[0]
						token_value = list(token.values())[0].strip()
						if(token_type == "dataType" and token_value == "char"):
							token = lexer()
							token_type = list(token.keys())[0]
							token_value = list(token.values())[0].strip()
							if(token_type == "arithmeticOperator" and token_value == "*"):
								token = lexer()
								token_type = list(token.keys())[0]
								token_value = list(token.values())[0].strip()
								if(token_type == "identifier" and token_value == "argv"):
									token = lexer()
									token_type = list(token.keys())[0]
									token_value = list(token.values())[0].strip()
									if(token_type == "punctuator" and token_value == "["):
										token = lexer()
										token_type = list(token.keys())[0]
										token_value = list(token.values())[0].strip()
										if(token_type == "punctuator" and token_value == "]"):
											token = lexer()
											token_type = list(token.keys())[0]
											token_value = list(token.values())[0].strip()
											if(token_type == "punctuator" and token_value == ")"):
												token = lexer()
												token_type = list(token.keys())[0]
												token_value = list(token.values())[0].strip()
												if(token_type == "punctuator" and token_value == "{"):
													status = statements()
													if(status == 0):
														token = lexer()
														token_type = list(token.keys())[0]
														token_value = list(token.values())[0].strip()
														if(not(token_type == "punctuator" and token_value == "}")):
															print("Syntax error: expected 'Punctuator2 close curly bracket' ", end = "")
															print("but received " + str(token_value) + "\n")
															status = 1
												else:
													print("Syntax error: expected 'Punctuator open curly bracket' ", end = "")
													print("but received " + str(token_value) + "\n")
													status = 1
											else:
												print("Syntax error: expected 'Punctuator close round bracket' but received ", end = "")
												print(str(token_value) + "\n")
												status = 1
										else:
											print("Syntax error: expected 'Punctuator close square bracket' but received ", end = "")
											print(str(token_value) + "\n")
											status = 1
									else:
										print("Syntax error: expected 'Punctuator open square bracket' but received ", end = "")
										print(str(token_value) + "\n")
										status = 1
								else:
									print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")
									status = 1
							else:
								print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")
								status = 1
						else:
							print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")
							status = 1
					else:
						print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")
						status = 1
				else:
					print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")
					status = 1
			else:
				print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")
				status = 1
		else:
			print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")
			status = 1
	else:
		print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")
		status = 1
	return status

data = {}
def statements():
	status = 0
	status = initializationStatement()
	if(status == 0):
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tv = token_value.strip()
		if(token_type == "punctuator" and tv == ";"):
			status = statements()
		else:
			print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")
			status = 1
	else:
		status = optionalDeclarationStatement()
		if(status == 0):
			token = lexer()
			token_type = list(token.keys())[0]
			token_value = list(token.values())[0]
			tv = token_value.strip()
			if(token_type == "punctuator" and tv == ";"):
				status = statements()
			else:
				print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")
				status = 1
		else:
			status = assignmentStatement()
			if(status == 0):
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0]
				tv = token_value.strip()
				if(token_type == "punctuator" and tv == ";"):
					status = statements()
				else:
					print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")
					status = 1
			else:
				status = 0
				token = lexer()
				token_type = list(token.keys())[0]
				token_value = list(token.values())[0]
				global li
				if(token_type == "keyword" and token_value == "do"):
					global do
					do = do + 1
					token = lexer()
					token_type = list(token.keys())[0]
					token_value = list(token.values())[0].strip()
					lab1()	# mark the loop head before parsing the body
					if(token_type == "punctuator" and token_value == "{"):
						status = statements()
						if(status == 0):
							token = lexer()
							token_type = list(token.keys())[0]
							token_value = list(token.values())[0].strip()
							if(token_type == "punctuator" and token_value == "}"):
								token = lexer()
								token_type = list(token.keys())[0]
								token_value = list(token.values())[0].strip()
								do = do - 1
								if(do == 0):
									print("main --> do")
								else:
									print("do --> do")
								if(token_type == "keyword" and token_value == "while"):
									token = lexer()
									token_type = list(token.keys())[0]
									token_value = list(token.values())[0].strip()
									if(token_type == "punctuator" and token_value == "("):
										status = condition()
										lab2()	# fold <operand relop operand> into a NODE, kept in wh[0]
										if(status == 0):
											token = lexer()
											token_type = list(token.keys())[0]
											token_value = list(token.values())[0].strip()
											if(token_type == "punctuator" and token_value == ")"):
												global wh
												print("while --> " + wh[0])
												token = lexer()
												token_type = list(token.keys())[0]
												token_value = list(token.values())[0].strip()
												if(token_type == "punctuator" and token_value == ";"):
													status = statements()
													if(do == 0):
														print("main --> while")
													else:
														print("do --> while")
												else:
													print("Syntax error: expected 'Punctuator semicolon5' ", end = "")
													print("but received " + str(token_value) + "\n")
													status = 1
											else:
												print("Syntax error: expected 'Punctuator close round bracket' ", end = "")
												print("but received " + str(token_value) + "\n")
												status = 1
									else:
										print("Syntax error: expected 'Punctuator open round bracket' ", end = "")
										print("but received " + str(token_value) + "\n")
										status = 1
								else:
									print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")
									status = 1
							else:
								print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")
								status = 1
					else:
						print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")
						status = 1
				else:
					status = 0
					tv = token_value.strip()
					if(tv == "{"):
						status = statements()
						if(status == 0):
							token = lexer()
							token_type = list(token.keys())[0]
							token_value = list(token.values())[0].strip()
							if(token_type == "punctuator" and token_value == "}"):
								status = statements()
							else:
								print("Syntax error: expected 'Punctuator close curly bracket' but received " + str(token_value) + "\n")
								status = 1
					else:
						#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
						global lb, fp
						lb = lb - len(token_value)
						fp = fp - len(token_value)

	return status

def initializationStatement():
	status = 0
	global lb, fp
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	if(token_type == "dataType"):
		if(token_value not in data):
			data[token_value] = {}
		status = initStat(token_value)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def initStat(dt):
	status = multipleInitialization(dt)
	# (an earlier single-variable version of this routine was left here,
	#  commented out, in the original source)
	return status

def multipleInitialization(dt):
	global data
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tk = token_value
	if(token_type == "identifier"):
		push(tk)
		if(token_value not in data[dt]):
			data[dt][token_value] = 0
		else:
			print("Syntax Error: The variable has already been initialized\n")
			return 1
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tv = token_value.strip()
		if(tv == ";"):
			global lb, fp
			lb = lb - len(token_value)
			fp = fp - len(token_value)
			return 0
		elif(token_type == "assignmentOperator" and tv == "="):
			status = E(dt, tk)
			codegen_assign()
			if(status == 0):
				status = multinit(dt)
				if(status == 2):
					status = 0
		elif(token_type == "punctuator" and tv == ","):
			status = multipleInitialization(dt)
		else:
			print("Syntax error: expected 'Assignment2 Operator' but received " + str(tv) + "\n")
			status = 1
	else:
		print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")	# was str(tv): tv is unbound on this path
		status = 1
	return status

def multinit(dt):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "punctuator" and tv == ","):
		status = multipleInitialization(dt)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def assignmentStatement():
	global data
	dty = ''
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tk = token_value
	if(token_type == "identifier"):
		push(tk)
		# recover the declared type of the target variable
		for i in data:
			for j in data[i]:
				if(j == token_value):
					dty = i
		if(dty == ''):
			print("The variable " + token_value + " has not been initialized.")
			return 1
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		if(token_type == "assignmentOperator" and token_value == "="):
			status = E(dty, tk)
			codegen_assign()
		else:
			print("Syntax error: expected 'Assignment3 Operator' but received " + str(token_value) + "\n")
			status = 1
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
		status = 2
	return status

def condition():
	status = 0
	status = C()
	return status

def C():
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "identifier" or token_type == "number"):
		push(tv)
		token = lexer()
		token_type = list(token.keys())[0]
		token_value = list(token.values())[0]
		tk = token_value.strip()
		if(token_type == "relationalOperator" or token_type == "logicalOperator"):
			push(tk)
			status = C()
		elif(token_value == ")"):
			global lb, fp
			lb = lb - len(token_value)
			fp = fp - len(token_value)
			return 0
		else:
			return 1
	elif(not (token_type == "boolean")):
		print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")
		status = 1
	return status

op = ""
def E(dt, vn):
	status = F(dt, vn)
	if(status == 0):
		status = E1(dt, vn)
	return status

def E1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "+"):
		op = "+"
		push(tv)
		status = F(dt, vn)
		codegen()
		if(status == 0):
			status = E1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def F(dt, vn):
	status = 0
	status = G(dt, vn)
	if(status == 0):
		status = F1(dt, vn)
	return status

def F1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "-"):
		op = "-"
		push(tv)
		status = G(dt, vn)
		codegen()
		if(status == 0):
			status = F1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def G(dt, vn):
	status = 0
	status = H(dt, vn)
	if(status == 0):
		status = G1(dt, vn)
	return status

def G1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	global op
	if(token_type == "arithmeticOperator" and tv == "*"):
		push(tv)
		op = "*"
		status = H(dt, vn)
		codegen()
		if(status == 0):
			status = G1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def H(dt, vn):
	status = 0
	status = I(dt, vn)
	if(status == 0):
		status = H1(dt, vn)
	return status

def H1(dt, vn):
	status = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "arithmeticOperator" and tv == "/"):
		global op
		op = "d"
		push(tv)
		status = I(dt, vn)
		codegen()
		if(status == 0):
			status = H1(dt, vn)
	else:
		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
		global lb, fp
		lb = lb - len(token_value)
		fp = fp - len(token_value)
	return status

def I(dt, vn):
	global data
	status = 0
	chk = 0
	token = lexer()
	token_type = list(token.keys())[0]
	token_value = list(token.values())[0]
	tv = token_value.strip()
	if(token_type == "arithmeticOperator" and tv == "-"):
		chk = 1
		push(tv)
		status = I(dt, vn)	# was I(): the recursive call dropped its arguments
		codegen_umin()
	elif(not(token_type == "identifier" or token_type == "number")):
		print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")
		status = 1
		return status
	if(token_type == "identifier" or token_type == "number"):
		push(tv)
	global op
	if(op == ""):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] = -1 * data[dt][token_value]
				chk = 0
			else:
				data[dt][vn] = data[dt][token_value]
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] = -1 * float(token_value)
				chk = 0
			else:
				data[dt][vn] = float(token_value)
	elif(op == "d"):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] /= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] /= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] /= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] /= float(token_value)
				op = ""
	elif(op == "*"):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] *= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] *= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] *= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] *= float(token_value)
				op = ""
	elif(op == "-"):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] -= -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] -= data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] -= -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] -= float(token_value)
				op = ""
	elif(op == "+"):
		if(token_type == "identifier"):
			if(chk == 1):
				data[dt][vn] += -1 * data[dt][token_value]
				chk = 0
				op = ""
			else:
				data[dt][vn] += data[dt][token_value]
				op = ""
		if(token_type == "number"):
			if(chk == 1):
				data[dt][vn] += -1 * float(token_value)
				chk = 0
				op = ""
			else:
				data[dt][vn] += float(token_value)
				op = ""
	return status

prg = open("nocomments.c").read()
symbolTable = dict()
externalVariables = dict()
localVariables = list()
keyword = ["include", "define", "while", "do", "for", "return", "extern"]
dataType = ["void", "int", "short", "long", "char", "float", "double"]
preDefRoutine = ["printf", "scanf"]
#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]
identifier = r"^[^\d\W]\w*\Z"
punctuator = r"^[()[\]{};.,]$"
aritmeticOperator = r"^[-+*/]$"
assignmentOperator = r"^=$"
relationalOperator = ["<", ">", "<=", ">=", "==", "!="]
logicalOperator = ["&&", "||", "!"]
number = r"^\d+$"
spaces = r"[ \n\t]"	# was "[' ''\n''\t']", which also matched quote characters
loadSymbolTable()
parse_start()
'''
for i in data:
	for j in data[i]:
		print(i+" "+j+" "+str(data[i][j]))
'''
"""
PARSER ERROR CODES:
0-SUCCESS
1-FAILURE
"""
...

lex.py
Source:lex.py  
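For orientation: lex.py is the three-address-code variant of the same parser; its codegen prints "t<i> = x op y" lines instead of NODE edges, and lab1/lab2 emit labels and conditional gotos. A minimal, self-contained sketch of the stack discipline used by push()/codegen() in the listing (the names mirror the listing; the driver lines at the bottom are illustrative):

li, top, i_ = [0] * 34, 0, 1

def push(val):
	global top
	top = top + 1
	li[top] = val

def codegen():
	global top, i_
	tmp = "t" + str(i_)
	print(tmp + " = " + str(li[top-2]), str(li[top-1]), str(li[top]))
	top = top - 2
	li[top] = tmp	# the subexpression collapses to its temporary
	i_ = i_ + 1

push("a"); push("b"); push("+"); push("c")
codegen()					# prints: t1 = b + c
print(str(li[top-1]) + " = " + str(li[top]))	# prints: a = t1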
1import re2#import ply.lex as lex3def loadSymbolTable():4	5	symbolTable["keyword"] = keyword6	symbolTable["dataType"] = dataType7	symbolTable["preDefRoutine"] = preDefRoutine8lb = 09fp = 110def validLexeme(string):11	12	res = False13	if(string in keyword):14		#print("key " + string + "\n")15		res = "keyword"16	elif(string in dataType):17		#print("dataType " + string + "\n")18		res = "dataType"19	elif(string in preDefRoutine):20		res = "preDefRoutine"21	elif(re.match(identifier, string)):22		#print("id " + string + "\n")23		res = "identifier"24	elif(re.match(punctuator, string)):25		#print("punc " + string)26		res = "punctuator"27	elif(re.match(number, string)):28		res = "number"29	elif(re.match(aritmeticOperator, string)):30		res = "arithmeticOperator"31	elif(re.match(assignmentOperator, string)):32		res = "assignmentOperator"33	elif(string in relationalOperator):34		res = "relationalOperator"35	elif(string in logicalOperator):36		res = "logicalOperator"37	elif(string == "#"):38		res = "hashOperator"39	elif(string == ".h"):40		res = "headerExtension"41	elif(string == "true" or string == "false"):42		res = "boolean"43	elif(string == "++"):44		res = "incrementOperator"45	elif(string == "--"):46		res = "decrementOperator"47	return res48top = 0;49i_ = 1;50tmp = "";51li = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]52def push(val):53	global top,li54	top = top+155	li[top]=val;56	57def codegen():58	global tmp,i_,top,li59	tmp = "t"60	tmp+=str(i_)61	print(tmp +" = "+str(li[top-2]), str(li[top-1]), str(li[top]));62	top-=2;63	li[top]=tmp64	i_=i_+1;65def codegen_umin():66	global tmp,i_,top,li67	tmp = "t"68	tmp+=str(i_)69	print(tmp+" = -"+str(li[top]));70	top=top-1;71	li[top]=tmp;72	i_=i_+1;73def codegen_assign():74	global tmp,i_,top,li75	print(str(li[top-1])+" = "+str(li[top]));76	top=top-2;77label = 178def lab1():79	global label80	print("L"+str(label)+":")81	label = label+182	83def lab2():84	global tmp,i_,top,li,label85	tmp = "t"86	tmp+=str(i_)87	print(tmp+" =  "+li[top-2],li[top-1],li[top]);88	print("if "+tmp+" goto L"+str(label-1));89	i_=i_+1;90	label = label-1;91	top = top-3;92def lexer():93	global lb94	global fp95	96	lexeme = prg[lb:fp]97	98	while(re.match(spaces, lexeme)):99		#print("x " + lexeme + "\n")100		lb = lb + 1101		fp = fp + 1102		lexeme = prg[lb:fp]103	104	#if(re.match(spaces, prg[105	#print("lexeme: " + lexeme + " type: " + str(type(lexeme)) + "\n");106	res = validLexeme(lexeme)107	while((not res) and (fp <= len(prg))):108		#print("lexeme1: " + lexeme + "\n")109		fp = fp + 1110		lexeme = prg[lb:fp]111		res = validLexeme(lexeme)112	113	#print(lexeme + "\n")114	tokenType = res115	res = validLexeme(lexeme)116	while((res) and (fp <= len(prg))):117		#print("lexeme2: " + lexeme + "\n")118		fp = fp + 1119		lexeme = prg[lb:fp]120		tokenType = res121		res = validLexeme(lexeme)122	123	lexeme = prg[lb:fp - 1]124	lb = fp - 1125	126	if((tokenType != False) and (tokenType not in symbolTable)):127		symbolTable[tokenType] = list()128		129	if((tokenType != False) and lexeme not in symbolTable[tokenType]):130		symbolTable[tokenType].append(lexeme.strip())131	132	#print("TOKEN: " + str(lexeme) + " TYPE: " + str(tokenType) + "\n");133	#print(str(lb) + " " + str(fp) + "\n")134	#print(str(len(prg)))135	return dict({tokenType:lexeme})136def parse_start():137	status = program()138	139	print("SUCCESSFUL PARSING\n") if(status == 0) else print("FAILED PARSING\n")140	141def program():142	status = preProcessorDirective()143	144	if(status == 0):145		status = externDeclaration()146		147		
if(status == 0):148			status = mainFunction()149	150	return status151def preProcessorDirective():152	status = 0153	token = lexer()154	155	token_type = list(token.keys())[0]156	token_value = list(token.values())[0]157	158	if(token_type == "hashOperator"):159		160		token = lexer()161		token_type = list(token.keys())[0]162		token_value = list(token.values())[0]163		164		if(token_type == "keyword" and token_value == "include"):165				166			token = lexer()167			token_type = list(token.keys())[0]168			token_value = list(token.values())[0]169			170			if(token_type == "relationalOperator" and token_value == "<"):171				172				token = lexer()173				token_type = list(token.keys())[0]174				token_value = list(token.values())[0]175				176				if(token_type == "identifier"):177					178					token = lexer()179					token_type = list(token.keys())[0]180					token_value = list(token.values())[0]181					182					183					if(token_type == "headerExtension"):184					185						token = lexer()186						token_type = list(token.keys())[0]187						token_value = list(token.values())[0]	188					189						if(token_type == "relationalOperator" and token_value == ">"):190					191								status = preProcessorDirective()192								#print(str(status) + " after return\n")193							194						else:195							print("Syntax error: expected '>' but received " + str(token_value) + "\n")196							status = 1197					else:198						print("Syntax error: expected 'Header Extension' but received " + str(token_value) + "\n")199						status = 1200						201				else:202					print("Syntax error: expected 'Identifer' but received " + str(token_value) + "\n")203					status = 1204			else:	205				print("Syntax error: expected '<' but received " + str(token_value) + "\n")206				status = 1207				208		elif(token_type == "keyword" and token_value == "define"):209			210			211			token = lexer()212			token_type = list(token.keys())[0]213			token_value = list(token.values())[0]214			215			if(token_type == "identifier"):216				217				variableName = token_value218				token = lexer()219				token_type = list(token.keys())[0]220				token_value = list(token.values())[0]221				222				if(token_type == "number"):223					224					variableValue = int(token_value.strip())225					symbolTable[variableName] = variableValue226					status = preProcessorDirective()227					228					229				else:230					print("Syntax error: expected 'Number' but received " + str(token_value) + "\n")231					status = 1232			else:233				print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")234				status = 1235					236		else:237			print("Syntax error: expected 'Keyword include/define' but received " + str(token_value) + "\n")238			status = 1239	else:240		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED241		global lb, fp242		lb = lb - len(token_value)243		fp = fp - len(token_value)244		245	return status246	#print("Token key: " + str((token_type) + " values: " + str(token_value) + "\n"))	247def externDeclaration():248	249	250	status = 0251	token = lexer()252	token_type = list(token.keys())[0]253	token_value = list(token.values())[0]254	if(token_type == "keyword" and token_value == "extern"):255		status = declarationStatement()256		if(status == 0):257		258			token = lexer()259			token_type = list(token.keys())[0]260			token_value = list(token.values())[0].strip()261			if(not (token_type == "punctuator" and token_value == ";")):262				print("Syntax error: expected 'Punctuator Semicolon1' but received " + str(token_value) + "\n")263				status = 1264	else:265		#RESET POINTERS SINCE 
A WRONG TOKEN WAS OBTAINED266		global lb, fp267		lb = lb - len(token_value)268		fp = fp - len(token_value)	269	return status270def declarationStatement():271	272	status = 0273	token = lexer()274	token_type = list(token.keys())[0]275	token_value = list(token.values())[0]276	if(token_type == 'dataType'):277		278		dataType = token_value.strip()279		status = variable(dataType)280		281	else:282		print("Syntax error: expected 'Data Type' but received " + str(token_value) + "\n")283		status = 1284	285	return status286	287def optionalDeclarationStatement():288	289	#print("IN OPTDECL")290	status = 0291	token = lexer()292	token_type = list(token.keys())[0]293	token_value = list(token.values())[0]294	#print("before reset: " + str(token_value))295	if(token_type == 'dataType'):296	297		298		dataType = token_value.strip()299		status = variable(dataType)300		301	else:302	303		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED304		#print("resetting")305		global lb, fp306		lb = lb - len(token_value)307		fp = fp - len(token_value)308		status = 2309		"""310		if(token_value != "do"):311			token = lexer()312			token_type = list(token.keys())[0]313			token_value = list(token.values())[0]314		"""315		#print("after reset: " + str(token_value))316	return status317	318	319def variable(dataType):320	status = 0321	token = lexer()322	token_type = list(token.keys())[0]323	token_value = list(token.values())[0]324	325	if(token_type == 'identifier'):326		327		#print("received identifier, " + str(token_value))328		variableName = token_value.strip()329		330		if(dataType not in externalVariables):331			externalVariables[dataType] = list()332		333		if(variableName not in externalVariables[dataType]):334			externalVariables[dataType].append(variableName)335		else:336			print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")337			status = 1338		#externalVariables.append([variableName, dataType])339		if(status==0):340			status = variableDash(dataType)341	else:342		print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")343		status = 1344	345	return status346def variableDash(dataType):347	status = 0348	token = lexer()349	token_type = list(token.keys())[0]350	token_value = list(token.values())[0]351	352	if(token_type == 'punctuator' and token_value == ','):353		354		token = lexer()355		token_type = list(token.keys())[0]356		token_value = list(token.values())[0]357	358		if(token_type == 'identifier'):359			360			variableName = token_value.strip()361			if(dataType not in externalVariables):362				externalVariables[dataType] = list() 363		364			if(variableName not in externalVariables[dataType]):365				externalVariables[dataType].append(variableName)366			else:367				print("Syntax error: The variable "+str(token_value)+" of type "+token_type+" has already been initiliazed.\n")368				status = 1369			if(status==0):370				variableDash(dataType)371		372		else:373			print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")374			status = 1375	else:376		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED377		global lb, fp378		#print(token_value)379		#print(str(lb) + " " + str(fp))380		lb = lb - len(token_value)381		fp = fp - len(token_value)382		#print(str(lb) + " " + str(fp))383	return status384	385def mainFunction():386	status = 0387	token = lexer()388	token_type = list(token.keys())[0]389	token_value = list(token.values())[0]390	391	if(token_type == "dataType" and token_value == "int"):392		393		status = mainDash()394		395	else:396		
print("Syntax error: expected 'Return Type Integer' but received " + str(token_value) + "\n")397		status = 1398	399	return status400	401	402def mainDash():403	status = 0404	token = lexer()405	token_type = list(token.keys())[0]406	token_value = list(token.values())[0].strip()407	408	#print(str(token_type) + " " + str(token_value))409	410	if(token_type == "identifier" and token_value == "main"):411	412		token = lexer()413		token_type = list(token.keys())[0]414		token_value = list(token.values())[0].strip()415		416		if(token_type == "punctuator" and token_value == "("):417		418			token = lexer()419			token_type = list(token.keys())[0]420			token_value = list(token.values())[0].strip()421			422			if(token_type == "punctuator" and token_value == ")"):423			424				token = lexer()425				token_type = list(token.keys())[0]426				token_value = list(token.values())[0].strip()427				428				if(token_type == "punctuator" and token_value == "{"):429				430					status = statements()431					432					if(status == 0):433						434						token = lexer()435						token_type = list(token.keys())[0]436						token_value = list(token.values())[0].strip()437						#print(token_value + str(len(token_value)))438						if(not(token_type == "punctuator" and token_value == "}")):439							print("Syntax error: expected 'Punctuator1 close curly bracket' but received " + str(token_value) + "\n")440							status = 1441				else:442					print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")443					status = 1444						445				446			447			elif(token_type == "dataType" and token_value == "int"):448			449				token = lexer()450				token_type = list(token.keys())[0]451				token_value = list(token.values())[0].strip()452				453				if(token_type == "identifier" and token_value == "argc"):454				455					token = lexer()456					token_type = list(token.keys())[0].strip()457					token_value = list(token.values())[0].strip()458					459					if(token_type == "punctuator" and token_value == ","):460				461						token = lexer()462						token_type = list(token.keys())[0]463						token_value = list(token.values())[0].strip()464						465						if(token_type == "dataType" and token_value == "char"):466				467							token = lexer()468							token_type = list(token.keys())[0]469							token_value = list(token.values())[0].strip()470							471							if(token_type == "arithmeticOperator" and token_value == "*"):472				473								token = lexer()474								token_type = list(token.keys())[0]475								token_value = list(token.values())[0]	.strip()476								477								if(token_type == "identifier" and token_value == "argv"):478				479									token = lexer()480									token_type = list(token.keys())[0]481									token_value = list(token.values())[0].strip()482									483									if(token_type == "punctuator" and token_value == "["):484				485										token = lexer()486										token_type = list(token.keys())[0]487										token_value = list(token.values())[0].strip()488										489										if(token_type == "punctuator" and token_value == "]"):490				491											token = lexer()492											token_type = list(token.keys())[0]493											token_value = list(token.values())[0].strip()494											495											if(token_type == "punctuator" and token_value == ")"):496				497												token = lexer()498												token_type = list(token.keys())[0]499												token_value = list(token.values())[0].strip()500											501												if(token_type == "punctuator" and token_value == "{"):502				503										
			status = statements()504					505													if(status == 0):506						507														token = lexer()508														token_type = list(token.keys())[0]509														token_value = list(token.values())[0].strip()510				511														if(not(token_type == "punctuator" and token_value == "}")):512															print("Syntax error: expected 'Punctuator2 close curly bracket' ", end = "")513															print("but received " + str(token_value) + "\n")514															status = 1515												else:516													print("Syntax error: expected 'Punctuator open curly bracket'  ", end = "")517													print("but received " + str(token_value) + "\n")518													status = 1519											520											else:521												print("Syntax error: expected 'Punctuator close round bracket' but received ", end = "")522												print(str(token_value) + "\n")523												status = 1524											525										else:526											print("Syntax error: expected 'Punctuator close square bracket' but received ", end = "")527											print(str(token_value) + "\n")528											status = 1529									else:530										print("Syntax error: expected 'Punctuator open square bracket' but received ", end = "")531										print(str(token_value) + "\n")532										status = 1533									534								else:535									print("Syntax error: expected 'Identifier argv' but received " + str(token_value) + "\n")536									status = 1537									538							else:539								print("Syntax error: expected 'Pointer operator *' but received " + str(token_value) + "\n")540								status = 1541							542						else:543							print("Syntax error: expected 'Data type character' but received " + str(token_value) + "\n")544							status = 1545						546					else:547						print("Syntax error: expected 'Punctuator comma' but received " + str(token_value) + "\n")548						status = 1	549				550				else:551					print("Syntax error: expected 'Identifier argc' but received " + str(token_value) + "\n")552					status = 1553				554			555			else:556				print("Syntax error: expected 'Punctuator close round bracket' but received " + str(token_value) + "\n")557				status = 1558				559		else:560			print("Syntax error: expected 'Punctuator open round bracket' but received " + str(token_value) + "\n")561			status = 1562	563	else:564		print("Syntax error: expected 'Identifier main' but received " + str(token_value) + "\n")565		status = 1566		567	return status568data = {}569def statements():570	571	572	#print("top of statements\n")573	status = 0574	status = initializationStatement()575	576	if(status == 0):577		#print("init success")578		token = lexer()579		token_type = list(token.keys())[0]580		token_value = list(token.values())[0]581		#print(token_value +" new value")582		tv = token_value.strip()583		if(token_type == "punctuator" and tv == ";"):584			status = statements()585		else:586			print("Syntax error: expected 'Punctuator semicolon2' but received " + str(token_value) + "\n")587			status = 1588			589			590	else:591		'''	token = lexer()592		token_type = list(token.keys())[0]593		token_value = list(token.values())[0]594		tv = token_value.strip()'''595		#print("dc" + " " + tv)596		597		598		status = optionalDeclarationStatement()599		#print(status)600		if(status == 0):	601			#print("decl success")602			603			token = lexer()604			token_type = list(token.keys())[0]605			token_value = list(token.values())[0]606			tv = token_value.strip()607			if(token_type == "punctuator" and tv == ";"):608				609				status = statements()610			else:611				
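
# Illustration only: the routines below assume that lexer() (defined earlier
# in this file) returns each token as a single-entry dict mapping token type
# to lexeme, which callers unpack with list(token.keys())[0] and
# list(token.values())[0].  A hypothetical stand-in with the same shape,
# never called by the parser itself:
def _demo_lexer(pairs):
    # pairs is an iterator of (token_type, lexeme) tuples, e.g.
    # iter([("identifier", "main"), ("punctuator", "(")])
    kind, lexeme = next(pairs)
    return {kind: lexeme}   # e.g. {"identifier": "main"}
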
print("Syntax error: expected 'Punctuator semicolon3' but received " + str(token_value) + "\n")612				status = 1613		else:614			615			status = assignmentStatement()616			if(status == 0):617				#print("assgn success")618				619				token = lexer()620				token_type = list(token.keys())[0]621				token_value = list(token.values())[0]622				tv = token_value.strip()623				if(token_type == "punctuator" and tv == ";"):624					status = statements()625				else:626					print("Syntax error: expected 'Punctuator semicolon4' but received " + str(token_value) + "\n")627					status = 1628			else:629				630				status = 0631				token = lexer()632				token_type = list(token.keys())[0]633				token_value = list(token.values())[0]634				#print("IN statements: " + token_value)635				if(token_type == "keyword" and token_value == "do"):636					#print("Do")637					token = lexer()638					token_type = list(token.keys())[0]639					token_value = list(token.values())[0].strip()640					lab1()641					if(token_type == "punctuator" and token_value == "{"):642						#print("{")643						status = statements()644						645						#print("status: " + str(status))646						if(status == 0):647					648							token = lexer()649							token_type = list(token.keys())[0]650							token_value = list(token.values())[0].strip()651							#print(token_value)652							if(token_type == "punctuator" and token_value == "}"):653								#print("}")654								token = lexer()655								token_type = list(token.keys())[0]656								token_value = list(token.values())[0].strip()657		658								if(token_type == "keyword" and token_value == "while"):659									#print("while")660									token = lexer()661									token_type = list(token.keys())[0]662									token_value = list(token.values())[0].strip()663		664									if(token_type == "punctuator" and token_value == "("):665										#print("(")666										status = condition()667										lab2()668										if(status == 0):669					670											token = lexer()671											token_type = list(token.keys())[0]672											token_value = list(token.values())[0].strip()673		674											if(token_type == "punctuator" and token_value == ")"):675												#print(")")676												token = lexer()677												token_type = list(token.keys())[0]678												token_value = list(token.values())[0].strip()679		680												if(token_type == "punctuator" and token_value == ";"):681													#print("in statements: " + token_value + "\n")682													status = statements()683					684												else:685													print("Syntax error: expected 'Punctuator semicolon5' ", end = "")686													print("but received " + str(token_value) + "\n")687													status = 1688					689											else:690												print("Syntax error: expected 'Punctuator close round bracket' ", end = "")691												print("but received " + str(token_value) + "\n")692												status = 1693					694									else:695										print("Syntax error: expected 'Punctuator open round bracket' ", end = "") 696										print("but received " + str(token_value) + "\n")697										status = 1698					699								else:700									print("Syntax error: expected 'Keyword while' but received " + str(token_value) + "\n")701									status = 1702					703							else:704								print("Syntax error: expected 'Punctuator10 close curly bracket' but received " + str(token_value) + "\n")705								status = 1706				707					else:708						print("Syntax error: expected 'Punctuator open curly bracket' but received " + str(token_value) + "\n")709						status = 
1710		711				else:712		713					#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED714					global lb, fp715					#print(token_value)716					#print(str(lb) + " " + str(fp))717					lb = lb - len(token_value)718					fp = fp - len(token_value)719	720	return status721def initializationStatement():722	status = 0723	724	global lb, fp725		726	token = lexer()727	token_type = list(token.keys())[0]728	token_value = list(token.values())[0]729	if(token_type == "dataType"):730		if(token_value not in data):731			data[token_value] = {};732			#print(token_value)733		734		status = initStat(token_value)735		736		737	else:738		739		#RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED740		#print(token_value)741		#print(str(lb) + " " + str(fp))742		lb = lb - len(token_value)743		fp = fp - len(token_value)744		status = 2745	#print('returning' + str(status))	746	return status747	748	749def initStat(dt):750	status = multipleInitialization(dt)751	#print(status)752	753	754	755	'''if(status != 0 and status != 2):756		status = 0757		token = lexer()758		token_type = list(token.keys())[0]759		token_value = list(token.values())[0]760		tk = token_value761		if(token_type == "identifier"):762		763			if(token_value not in data[dt]):764				data[dt][token_value]=0765			else:766				print("Syntax Error: The variable has already been initialized\n")767				return 1768			token = lexer()769			token_type = list(token.keys())[0]770			token_value = list(token.values())[0]771	772			if(token_type == "assignmentOperator" and token_value == "="):773				774				status = E(dt,tk)775				"""776				print(status)777				status = 0778				token = lexer()779				token_type = list(token.keys())[0]780				token_value = list(token.values())[0]781				print(token_value)782				"""783			784			elif(token_type == "punctuator" and token_value == ","):785			786				global lb, fp787				#print(token_value)788				#print(str(lb) + " " + str(fp))789				lb = lb - len(token_value)790				fp = fp - len(token_value)791				status = 2792			793			else:794				795				print("Syntax error: expected 'Assignment1 Operator' but received " + str(token_value) + "\n")796				status = 1 '''797		798	799	return status800		801def multipleInitialization(dt):802	global data803	status = 0804	token = lexer()805	token_type = list(token.keys())[0]806	token_value = list(token.values())[0]807	tk = token_value808	if(token_type == "identifier"):809		push(tk)810		#print(tk)811		if(token_value not in data[dt]):812				if(dt=="int"):813					data[dt][token_value]=int(0)814				elif(dt=="char"):815					data[dt][token_value]=string(0)816				elif(dt=="float"):817					data[dt][token_value]=float(0)818				elif(dt=="double"):819					data[dt][token_value]=float(0)820				else:821					data[dt][token_value]=0822				#print(" "+token_value +":)")823		else:824				print("Syntax Error: The variable has already been initialized\n")825				return 1826		827		token = lexer()828		token_type = list(token.keys())[0]829		token_value = list(token.values())[0]830		tv = token_value.strip()831		#print(token_value+" macha")832		if(tv == ";"):833			#print("; la")834			global lb, fp835			#print(token_value)836			#print(str(lb) + " " + str(fp))837			lb = lb - len(token_value)838			fp = fp - len(token_value)839			return 0;840		elif(token_type == "assignmentOperator" and tv == "="):841				842			status = E(dt,tk)843			codegen_assign()844			#print(status)845			846			if(status == 0):847				848				status = multinit(dt)849				if(status == 2):850					status = 0851				#print(status)852		elif(token_type == "punctuator" and tv == ","):853			#print(",")854			
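
# The pointer-rollback idiom used throughout this parser (decrement the
# global scan pointers lb and fp by the length of the lexeme) is how a
# production "unreads" a token before returning status 2.  A sketch of a
# helper that captures the pattern -- not wired into the routines here,
# shown only to document it:
def _unread(token_value):
    # Rewind the lexer's buffer pointers so the same token is produced
    # again on the next lexer() call.
    global lb, fp
    lb = lb - len(token_value)
    fp = fp - len(token_value)
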
def multipleInitialization(dt):
    global data
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tk = token_value
    if token_type == "identifier":
        push(tk)
        #print(tk)
        if token_value not in data[dt]:
            # default-initialise the variable according to its declared type
            if dt == "int":
                data[dt][token_value] = int(0)
            elif dt == "char":
                data[dt][token_value] = str(0)
            elif dt == "float":
                data[dt][token_value] = float(0)
            elif dt == "double":
                data[dt][token_value] = float(0)
            else:
                data[dt][token_value] = 0
            #print(" " + token_value + " :)")
        else:
            print("Syntax Error: The variable has already been initialized\n")
            return 1

        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tv = token_value.strip()
        #print(token_value + " macha")
        if tv == ";":
            #print("; la")
            global lb, fp
            #print(token_value)
            #print(str(lb) + " " + str(fp))
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            return 0
        elif token_type == "assignmentOperator" and tv == "=":

            status = E(dt, tk)
            codegen_assign()
            #print(status)

            if status == 0:

                status = multinit(dt)
                if status == 2:
                    status = 0
                #print(status)
        elif token_type == "punctuator" and tv == ",":
            #print(",")
            status = multipleInitialization(dt)
            '''global lb, fp
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            status = 2 '''

        else:

            print("Syntax error: expected 'Assignment Operator' but received " + str(tv) + "\n")
            status = 1
    else:

        print("Syntax error: expected 'Identifier' but received " + str(token_value) + "\n")
        status = 1

    return status


def multinit(dt):
    status = 0

    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()

    if token_type == "punctuator" and tv == ",":

        #print("got comma")
        status = multipleInitialization(dt)

    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2

    return status


def assignmentStatement():
    global data
    dty = ''
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tk = token_value
    #print("asgn")
    if token_type == "identifier":
        push(tk)
        #print(tk)
        for i in data:
            for j in data[i]:
                if j == token_value:
                    dty = i
        if dty == '':
            print("The variable " + token_value + " has not been initialized.")
            return 1
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]

        if token_type == "assignmentOperator" and token_value == "=":

            status = E(dty, tk)
            codegen_assign()

        else:

            print("Syntax error: expected 'Assignment Operator' but received " + str(token_value) + "\n")
            status = 1
    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
        status = 2

    return status


def condition():
    status = 0

    status = C()

    return status


def C():
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    if token_type == "identifier" or token_type == "number":
        push(tv)
        token = lexer()
        token_type = list(token.keys())[0]
        token_value = list(token.values())[0]
        tk = token_value.strip()
        if token_type == "relationalOperator" or token_type == "logicalOperator":
            push(tk)
            status = C()
        elif tk == ")":
            global lb, fp
            #print(token_value)
            #print(str(lb) + " " + str(fp))
            lb = lb - len(token_value)
            fp = fp - len(token_value)
            return 0
        else:
            return 1
    elif not (token_type == "boolean"):

        print("Syntax error: expected 'Boolean' but received " + str(token_value) + "\n")
        status = 1
    return status
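
# Expression grammar implemented by E/E1 through H/H1 and I below, as
# reconstructed from the code (each X1 is a right-recursive tail; epsilon
# is realised by rolling the token back):
#   E  -> F E1      E1 -> + F E1 | epsilon
#   F  -> G F1      F1 -> - G F1 | epsilon
#   G  -> H G1      G1 -> * H G1 | epsilon
#   H  -> I H1      H1 -> / I H1 | epsilon
#   I  -> - I | identifier | number
# Note that this places +, -, * and / on four distinct precedence levels,
# which is not the usual C precedence (where * and / tie and bind tighter
# than + and -, which also tie).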
op = ""


def E(dt, vn):
    status = F(dt, vn)
    if status == 0:

        status = E1(dt, vn)

    return status


def E1(dt, vn):
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    global op
    if token_type == "arithmeticOperator" and tv == "+":
        op = "+"
        push(tv)
        #print(tv)
        status = F(dt, vn)
        codegen()
        if status == 0:

            status = E1(dt, vn)

    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def F(dt, vn):
    status = 0

    status = G(dt, vn)

    if status == 0:

        status = F1(dt, vn)
    return status


def F1(dt, vn):
    status = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    global op
    if token_type == "arithmeticOperator" and tv == "-":
        op = "-"
        push(tv)
        #print(tv)
        status = G(dt, vn)
        codegen()

        if status == 0:

            status = F1(dt, vn)

    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def G(dt, vn):
    status = 0

    status = H(dt, vn)
    if status == 0:

        status = G1(dt, vn)
    return status


def G1(dt, vn):
    status = 0

    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    global op
    if token_type == "arithmeticOperator" and tv == "*":
        push(tv)
        #print(tv)
        op = "*"
        status = H(dt, vn)
        codegen()
        if status == 0:

            status = G1(dt, vn)

    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value)
        #print(str(lb) + " " + str(fp))
        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status


def H(dt, vn):
    status = 0

    status = I(dt, vn)

    if status == 0:

        status = H1(dt, vn)
    return status


def H1(dt, vn):
    status = 0

    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()
    global op
    if token_type == "arithmeticOperator" and tv == "/":
        op = "d"
        push(tv)
        #print(tv)
        status = I(dt, vn)
        codegen()
        if status == 0:

            status = H1(dt, vn)

    else:

        # RESET POINTERS SINCE A WRONG TOKEN WAS OBTAINED
        global lb, fp
        #print(token_value + ":::" + str(len(token_value)))
        #print(str(lb) + " " + str(fp))

        lb = lb - len(token_value)
        fp = fp - len(token_value)
    return status
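
# I() both recognises the operand and interprets it: the global 'op' holds
# the operator pending from the level above ("d" encodes division), 'chk'
# records a consumed unary minus, and the result is folded straight into
# the variable table, roughly:  data[dt][vn]  op=  (-1 if chk else 1) * value.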
def I(dt, vn):
    global data
    status = 0
    chk = 0
    token = lexer()
    token_type = list(token.keys())[0]
    token_value = list(token.values())[0]
    tv = token_value.strip()

    if token_type == "arithmeticOperator" and tv == "-":
        chk = 1
        push(tv)
        #print(tv)
        status = I(dt, vn)
        codegen_umin()
    elif not (token_type == "identifier" or token_type == "number"):
        print("Syntax error: expected 'Identifier/Number' but received " + str(token_value) + "\n")
        status = 1
        return status
    if token_type == "identifier" or token_type == "number":
        push(tv)
        #print(tv)
    global op
    g = True
    if token_type == "identifier":
        if token_value not in data[dt]:
            print("Syntax error: The variable " + token_value + " not in " + dt)
            g = False
    elif token_type == "number":
        # dt is the declared type *name* ("int", "float", ...), not a Python
        # type, so an isinstance() check cannot work here; instead verify that
        # the lexeme converts to the declared type (ints for "int", floats
        # otherwise -- an assumption about the intended check).
        try:
            if dt == "int":
                int(tv)
            else:
                float(tv)
        except ValueError:
            print("Syntax error: The value belongs to a different type")
            g = False
    if op == "" and g:
        if token_type == "identifier":
            if chk == 1:
                data[dt][vn] = -1 * data[dt][token_value]
                chk = 0
            else:
                #print(token_value)
                data[dt][vn] = data[dt][token_value]

        if token_type == "number":
            if chk == 1:
                data[dt][vn] = -1 * float(token_value)
                chk = 0
            else:
                data[dt][vn] = float(token_value)
    elif op == "d" and g:
        if token_type == "identifier":
            if chk == 1:
                data[dt][vn] /= -1 * data[dt][token_value]
                chk = 0
                op = ""
            else:
                data[dt][vn] /= data[dt][token_value]
                op = ""

        if token_type == "number":
            if chk == 1:
                data[dt][vn] /= -1 * float(token_value)
                chk = 0
                op = ""
            else:
                data[dt][vn] /= float(token_value)
                op = ""
    elif op == "*" and g:
        if token_type == "identifier":
            if chk == 1:
                data[dt][vn] *= -1 * data[dt][token_value]
                chk = 0
                op = ""
            else:
                data[dt][vn] *= data[dt][token_value]
                op = ""

        if token_type == "number":
            if chk == 1:
                data[dt][vn] *= -1 * float(token_value)
                chk = 0
                op = ""
            else:
                data[dt][vn] *= float(token_value)
                op = ""
    elif op == "-" and g:
        if token_type == "identifier":
            if chk == 1:
                data[dt][vn] -= -1 * data[dt][token_value]
                chk = 0
                op = ""
            else:
                data[dt][vn] -= data[dt][token_value]
                op = ""

        if token_type == "number":
            if chk == 1:
                data[dt][vn] -= -1 * float(token_value)
                chk = 0
                op = ""
            else:
                data[dt][vn] -= float(token_value)
                op = ""
    elif op == "+" and g:
        if token_type == "identifier":
            if chk == 1:
                data[dt][vn] += -1 * data[dt][token_value]
                chk = 0
                op = ""
            else:
                data[dt][vn] += data[dt][token_value]
                op = ""

        if token_type == "number":
            if chk == 1:
                data[dt][vn] += -1 * float(token_value)
                chk = 0
                op = ""
            else:
                data[dt][vn] += float(token_value)
                op = ""
    return status


prg = open("nocomments.c").read()
symbolTable = dict()
externalVariables = dict()
localVariables = list()
keyword = ["include", "define", "while", "do", "for", "return", "extern"]
dataType = ["void", "int", "short", "long", "char", "float", "double"]
preDefRoutine = ["printf", "scanf"]
#headerFile = ["stdio.h", "stdlib.h", "math.h", "string.h"]
identifier = r"^[^\d\W]\w*\Z"
punctuator = r"^[()[\]{};.,]$"
aritmeticOperator = r"^[-+*/]$"   # spelling kept as-is: the lexer defined earlier refers to this name
assignmentOperator = r"^=$"
relationalOperator = ["<", ">", "<=", ">=", "==", "!="]
logicalOperator = ["&&", "||", "!"]
number = r"^\d+$"
spaces = r"[ \n\t]"               # whitespace: space, newline, tab
loadSymbolTable()
parse_start()
'''
for i in data:
    for j in data[i]:
        print(i + " " + j + " " + str(data[i][j]))
'''
"""
while lb != len(prg):
    lexer()
"""
#print(symbolTable)
#print(externalVariables)
"""
PARSER ERROR CODES:
0-SUCCESS
1-FAILURE
2-NO MATCH (token rolled back for another production to try)
"""
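
# For reference, a minimal sketch of an input this parser accepts (the
# contents of nocomments.c are hypothetical -- any comment-stripped C
# source with this shape should parse):
#
#   int main()
#   {
#       int a = 1, b = 2;
#       a = a + b;
#       do { a = a - 1; } while (a > 0);
#   }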