Best Python code snippet using autotest_python
parserMINE.py
Source:parserMINE.py  
from first_follow_getters import *
import copy
from Neuspeak_Lexer import *


class ParseTree:
    def __init__(self):
        self.data = ''
        self.children = []
        self.parent = None
        self.index = 0


class Parser:
    # Wires together all of the files involved in lexing, table building and parsing
    def __init__(self,
                 path_to_code,
                 path_to_lexer_output,
                 read_cfg,
                 write_productions,
                 write_first,
                 write_follow,
                 write_table,
                 write_actionTable_loc,
                 read_first_follow_table):
        """
        Args:
            path_to_code (str): Path to the file that contains the Neuspeak code
            path_to_lexer_output (str): Path to the file in which the lexer writes the token stream
            read_cfg (str): Path to the file in which the CFG for Neuspeak is kept
            write_productions (str): Path to the file where the lists of terminals, non-terminals and productions are written
            write_first (str): Path to the file where the FIRST sets are written
            write_follow (str): Path to the file where the FOLLOW sets are written
            write_table (str): Path to the file where the parse table for the Neuspeak CFG is written
            write_actionTable_loc (str): Path to the file where the action table for the current file is written
            read_first_follow_table (str): Path to the file where the first/follow table exists
        """
        self.write_actionTable_loc = write_actionTable_loc

        self.root = ParseTree()
        self.lex = Lexer(path_to_code, path_to_lexer_output)  # the Lexer exposes self.lex.tokenStream, a list of tokens
        self.tokenStream = self.lex.tokenStream

        self.fftObj = FirstFollowTable(read_first_follow_table,
                                       read_cfg,
                                       write_table,
                                       write_first,
                                       write_follow,
                                       write_productions)
        self.start_nt = self.fftObj.start_nt
        self.stack = ['$', self.start_nt]
        self.table = self.fftObj.table
        self.NT = sorted(self.fftObj.NT)
        self.T = sorted(self.fftObj.T)
        self.actionTable = []
        self.max_actionTable_widths = []

        self.root.data = self.start_nt
        self.currNode = self.root

        self.getTree()
        self.writeActionTable()

    def addToTree(self, R):
        """
        Summary:
            When a non-terminal is at the top of the stack it is expanded into its production.
            This function reflects that in the tree: a new node is created for every terminal
            and non-terminal on the right-hand side.
        Args:
            R (list of str): the right-hand side of the rule used to expand the non-terminal
        """
        for ind, s in enumerate(R):
            node = ParseTree()
            node.parent = self.currNode
            node.index = ind
            node.data = s.strip()
            self.currNode.children.append(node)
        self.currNode = self.currNode.children[-1]

    def setNode(self):
        """
        Summary:
            Moves the current node to the right location in the parse tree, so that the
            current node and the top of the stack stay in sync.
        """
        while True:
            if self.currNode.data == self.root.data:
                return
            if self.currNode.index == 0:
                self.currNode = self.currNode.parent
            else:
                break
        self.currNode = self.currNode.parent.children[self.currNode.index - 1]

    def getTree(self):
        curr = 0
        count = 0
        line = 1
        # loop until the stack or the token stream is exhausted; count caps runaway parses
        while len(self.stack) > 0 and count < 1000 and curr < len(self.tokenStream):
            # the element at the top of the stack is a non-terminal
            if self.stack[-1].strip().startswith('{'):
                row = self.NT.index(self.stack[-1].strip()) + 1
                col = self.table[0].index(self.tokenStream[curr])
                # the parse-table entry for this stack top and lookahead is a production
                if '=' in self.table[row][col]:
                    self.stack.pop()
                    ruleRHS = self.table[row][col].split('=')[1].strip().split(' ')
                    self.stack.extend(ruleRHS[::-1])  # push the RHS of the rule in reverse
                    self.actionTable.append([self.tokenStream[curr:], self.stack[:], self.table[row][col], str(line)])
                    self.addToTree(ruleRHS[::-1])
                # error handling
                else:
                    # the parse-table entry for this stack top and lookahead is 'sync'
                    if self.table[row][col].strip() == 'sync':
                        error = '   EXPECTED ' + self.stack[-1] + ' GOT ' + self.tokenStream[curr]
                        self.stack.pop()
                        self.setNode()
                        self.actionTable.append([self.tokenStream[curr:], self.stack[:], '*' * 6 + 'sync' + '*' * 6, str(line) + error])
                    # the parse-table entry for this stack top and lookahead is 'skip'
                    if self.table[row][col].strip() == 'skip':
                        error = '   EXPECTED ' + self.stack[-1] + ' GOT ' + self.tokenStream[curr]
                        if self.tokenStream[curr] == '$':
                            self.stack.pop()
                            self.actionTable.append([self.tokenStream[curr:], self.stack[:], '*' * 6 + 'sync' + '*' * 6, str(line) + error])
                        else:
                            curr += 1
                            self.actionTable.append([self.tokenStream[curr:], self.stack[:], '*' * 6 + 'skip' + '*' * 6, str(line) + error])
            # the top of the stack is a terminal or epsilon
            elif self.stack[-1].strip().startswith('['):
                # record the line number of the code
                if self.stack[-1] == '[NL]':
                    line += 1
                # the terminal at the top of the stack matches the current token in the token stream
                if self.stack[-1].strip() == self.tokenStream[curr]:
                    curr += 1
                    termin = self.stack.pop()
                    self.actionTable.append([self.tokenStream[curr:], self.stack[:], 'match ' + str(termin), str(line)])
                    self.setNode()
                # the top of the stack is epsilon, so it is simply popped
                elif self.stack[-1].strip() == '[~]':
                    termin = self.stack.pop()
                    self.actionTable.append([self.tokenStream[curr:], self.stack[:], 'epsilon pop ' + str(termin), str(line)])
                    self.setNode()
                # error handling: the terminal at the top of the stack does not match the current token
                else:
                    row = self.NT.index(self.currNode.parent.data) + 1
                    col = self.table[0].index(self.tokenStream[curr])
                    # the parse-table entry for the parent non-terminal and the lookahead is 'sync'
                    if self.table[row][col].strip() == 'sync':
                        error = '   EXPECTED ' + self.stack[-1] + ' GOT ' + self.tokenStream[curr]
                        self.stack.pop()
                        self.setNode()
                        self.actionTable.append(
                            [self.tokenStream[curr:], self.stack[:], '*' * 6 + 'sync' + '*' * 6, str(line) + error])
                    # the parse-table entry for the parent non-terminal and the lookahead is 'skip'
                    elif self.table[row][col].strip() == 'skip':
                        error = '   EXPECTED ' + self.stack[-1] + ' GOT ' + self.tokenStream[curr]
                        curr += 1 if self.tokenStream[curr] != '$' else 0
                        if self.tokenStream[curr] == '$':
                            self.stack.pop()
                            self.actionTable.append(
                                [self.tokenStream[curr:], self.stack[:], '*' * 6 + 'sync' + '*' * 6, str(line) + error])
                        else:
                            self.actionTable.append(
                                [self.tokenStream[curr:], self.stack[:], '*' * 6 + 'skip' + '*' * 6, str(line) + error])
                    else:
                        error = '   EXPECTED ' + self.stack[-1] + ' GOT ' + self.tokenStream[curr]
                        self.stack.pop()
                        self.actionTable.append(
                            [self.tokenStream[curr:], self.stack[:], '*' * 6 + 'sync' + '*' * 6, str(line) + error])
            else:
                # the end of both the stack and the token stream has been reached, so parsing is complete
                if self.stack[-1].strip() == '$' and self.tokenStream[curr] == '$':
                    self.stack.pop()
                    curr += 1
                    self.actionTable.append([self.tokenStream[curr:], self.stack[:], 'matched. PARSING COMPLETE', str(line)])
                else:
                    curr += 1
                    self.actionTable.append([self.tokenStream[curr:], self.stack[:], '*' * 6 + 'skip' + '*' * 6, str(line)])
            count += 1

    # write the action table to a file
    def writeActionTable(self):
        """
        Summary:
            Writes the action table to a file. The maximum width of each column is determined
            first so that the columns line up and stay readable.
        """
        row_n = len(self.actionTable)
        for c in range(4):
            max_len = 0
            for r in range(row_n):
                cell = self.actionTable[r][c]
                # columns 0 and 1 hold lists of tokens, columns 2 and 3 hold plain strings
                cell_text = ' '.join(cell) if isinstance(cell, list) else str(cell)
                if len(cell_text) > max_len:
                    max_len = len(cell_text)
            self.max_actionTable_widths.append(max_len)
        with open(self.write_actionTable_loc, 'w') as f:
            s1 = '{:<' + str(self.max_actionTable_widths[0] + 3) + '}'
            s2 = '{:<' + str(self.max_actionTable_widths[1] + 3) + '}'
            s3 = '{:<' + str(self.max_actionTable_widths[2] + 3) + '}'
            s4 = '{:<' + str(self.max_actionTable_widths[3] + 3) + '}'
            s = s1 + s2 + s3 + s4
            f.write(s.format('TOKEN STREAM', 'STACK', 'ACTION', 'LINE NUMBER') + '\n')
            for row in self.actionTable:
                f.write(s.format(' '.join(row[0]), ' '.join(row[1]), str(row[2]), str(row[3])) + '\n')
...
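A minimal driver sketch (not part of the original snippet): the Parser constructor already runs the lexer, builds the parse tree and writes the action table, so instantiating it is the whole workflow. Every file path below is a hypothetical placeholder, and first_follow_getters.py plus Neuspeak_Lexer.py are assumed to be importable from the same project.

# Hypothetical driver for the Parser above; all file paths are placeholders.
from parserMINE import Parser

if __name__ == '__main__':
    Parser(path_to_code='program.nsp',
           path_to_lexer_output='out/tokens.txt',
           read_cfg='grammar/neuspeak_cfg.txt',
           write_productions='out/productions.txt',
           write_first='out/first.txt',
           write_follow='out/follow.txt',
           write_table='out/parse_table.txt',
           write_actionTable_loc='out/action_table.txt',
           read_first_follow_table='grammar/first_follow_table.txt')
    # The constructor calls getTree() and writeActionTable() itself, so the
    # formatted action table ends up at the write_actionTable_loc path.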
test_tokenizing.py
Source:test_tokenizing.py
#!/usr/bin/python3
import jk_testing
import jk_version
import jk_php_version_parser

tokenizer = jk_php_version_parser.ComposerVersionTokenizer()
#
# Successes
#
@jk_testing.TestCase()
def test_tokenizing_version_1(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 1)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_version_2(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("1.2.3 2.3.4")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
            jk_php_version_parser._ComposerTokenPattern("v", "2.3.4"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_ge(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize(">= 1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", ">="),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_gt(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("> 1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", ">"),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_le(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("<= 1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", "<="),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_lt(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("< 1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", "<"),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_range(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("1.2.3 - 2.3.4")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 3)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
            jk_php_version_parser._ComposerTokenPattern("op", "-"),
            jk_php_version_parser._ComposerTokenPattern("v", "2.3.4"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_version_wildcard(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("1.2.*")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 1)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.*"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_version_gt_wildcard(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize(">=1.2.*")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", ">="),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.*"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_version_caret(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("^1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", "^"),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
@jk_testing.TestCase()
def test_tokenizing_version_tilde(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("~1.2.3")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 2)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("op", "~"),
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
        )
    )
#
#
# Errors
#
# TODO
################################################################################################################################
testDriver = jk_testing.TestDriver()
results = testDriver.runTests([
    (test_tokenizing_version_1, True),
    (test_tokenizing_version_2, True),
    (test_tokenizing_ge, True),
    (test_tokenizing_gt, True),
    (test_tokenizing_le, True),
    (test_tokenizing_lt, True),
    (test_tokenizing_range, True),
    (test_tokenizing_version_wildcard, True),
    (test_tokenizing_version_gt_wildcard, True),
    (test_tokenizing_version_caret, True),
    (test_tokenizing_version_tilde, True),
    #(test_OR, True),
    #(test_AND, True),
])
reporter = jk_testing.TestReporterHTML()
...
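The commented-out (test_OR, True) and (test_AND, True) entries suggest that boolean-operator cases were planned but never written. A sketch of what one such case could look like, following the pattern of the tests above; it assumes the tokenizer emits "||" as a single "op" token, which the snippet does not confirm.

# Hypothetical test sketch: assumes "||" tokenizes to one "op" token (not verified).
@jk_testing.TestCase()
def test_tokenizing_or(ctx:jk_testing.TestContext):
    tokenStream = tokenizer.tokenize("1.2.3 || 2.3.4")
    tokenStream.dump(printFunc=ctx.log.debug)
    jk_testing.Assert.isEqual(len(tokenStream), 3)
    jk_testing.Assert.isNotNone(
        tokenStream.tryMatchSequence(
            jk_php_version_parser._ComposerTokenPattern("v", "1.2.3"),
            jk_php_version_parser._ComposerTokenPattern("op", "||"),
            jk_php_version_parser._ComposerTokenPattern("v", "2.3.4"),
        )
    )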
Tokenizer.py
Source:Tokenizer.py
from Token import Token


class HingeJointPerceptor:
    def __init__(self, name=None, ax=0):
        self.name = name
        self.ax = ax

    def fillForm(self, tokenStream):
        next(tokenStream)
        self.name = next(tokenStream).value
        next(tokenStream)
        self.ax = next(tokenStream).value
        return True


class ForcePerceptor:
    def __init__(self, name=None, center=None, force=None):
        self.name = name
        self.center = center
        self.force = force

    def fillForm(self, tokenStream):
        next(tokenStream)
        self.name = next(tokenStream).value
        next(tokenStream)
        self.center = []
        for _ in range(3):
            self.center.append(next(tokenStream).value)
        next(tokenStream)
        self.force = []
        for _ in range(3):
            self.force.append(next(tokenStream).value)


class ACCPerceptor:
    def __init__(self, name=None, a=None):
        self.name = name
        self.a = a

    def fillForm(self, tokenStream):
        next(tokenStream)
        self.name = next(tokenStream).value
        next(tokenStream)
        self.a = []
        for _ in range(3):
            self.a.append(next(tokenStream).value)


def splitMessage2Token(msg):
    # split the S-expression style message on whitespace and parentheses
    msgList = msg.split()
    msgList2 = []
    for s in msgList:
        msgList2.extend(s.split('('))
    msgList3 = []
    for s in msgList2:
        msgList3.extend(s.split(')'))
    msgList3 = [s for s in msgList3 if s != '']
    return [Token(s) for s in msgList3]


def fillPerceptor(tokenList):
    tokenStream = iter(tokenList)
    token = next(tokenStream)
    HJList, ACCList, forceList = [], [], []
    try:
        while True:
            if token.type == 0:
                HJ = HingeJointPerceptor()
                HJ.fillForm(tokenStream)
                HJList.append(HJ)
            elif token.type == 1:
                ACC = ACCPerceptor()
                ACC.fillForm(tokenStream)
                ACCList.append(ACC)
            elif token.type == 2:
                force = ForcePerceptor()
                force.fillForm(tokenStream)
                forceList.append(force)
            token = next(tokenStream)
    except StopIteration:
        # the token stream is exhausted
        pass
    return HJList, ACCList, forceList

# msg = '(time (now 5887.28))(GS (sl 0) (sr 0) (t 0.00) (pm BeforeKickOff))(GYR (n torso) (rt -42.04 -0.00 -0.00))(ACC (n torso) (a 0.00 -1.34 2.83))(HJ (n hj1) (ax -0.00))(HJ (n hj2) (ax 0.00))(HJ (n raj1) (ax -104.84))(HJ (n raj2) (ax -62.91))(HJ (n raj3) (ax 0.00))(HJ (n raj4) (ax 55.92))(HJ (n laj1) (ax -104.84))(HJ (n laj2) (ax 62.91))(HJ (n laj3) (ax -0.00))(HJ (n laj4) (ax -55.92))(HJ (n rlj1) (ax -0.00))(HJ (n rlj2) (ax -0.00))(HJ (n rlj3) (ax 69.90))(HJ (n rlj4) (ax -130.63))(HJ (n rlj5) (ax 69.90))(FRP (n rf) (c -0.00 -0.08 -0.01) (f -0.00 6.47 26.02))(HJ (n rlj6) (ax 0.00))(HJ (n llj1) (ax -0.00))(HJ (n llj2) (ax 0.00))(HJ (n llj3) (ax 69.90))(HJ (n llj4) (ax -130.63))(HJ (n llj5) (ax 69.90))(FRP (n lf) (c 0.00 -0.08 -0.01) (f 0.00 6.47 26.02))(HJ (n llj6) (ax -0.00))'
# a, b, c = fillPerceptor(splitMessage2Token(msg))
# for cc in a:
...
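The commented-out lines at the bottom hint at the intended usage. Spelled out as a runnable sketch: the message below is a shortened version of the one in the original comment, and the Token class is assumed to expose .type (0 for HJ, 1 for ACC, 2 for FRP) and .value, which is what fillPerceptor relies on.

# Usage sketch; shortened message and an assumed Token interface (.type / .value).
msg = '(HJ (n hj1) (ax -0.00))(ACC (n torso) (a 0.00 -1.34 2.83))(FRP (n rf) (c -0.00 -0.08 -0.01) (f -0.00 6.47 26.02))'
hinge_joints, accelerometers, forces = fillPerceptor(splitMessage2Token(msg))
for hj in hinge_joints:
    print(hj.name, hj.ax)                   # e.g. hj1 -0.00
for acc in accelerometers:
    print(acc.name, acc.a)                  # accelerometer name and 3-axis reading
for frp in forces:
    print(frp.name, frp.center, frp.force)  # force resistance perceptor details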
