How to use match_token method in Gherkin-python

Best Python code snippet using gherkin-python

Run Gherkin-python automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

RuleOutput.py

Source: RuleOutput.py Github

copy
1# defines a RuleOutput class
2# and its subclasses
3# these perform singular operators on Ingredient objects
4
5from .MyNumber import MyNumber
6from .Ingredient import Ingredient
7from .FormatCheck import listOfStrings, listOfTupleStringBools
8
class RuleOutput(object):
    """Abstract base for rule outputs: singular operators applied to Ingredient objects."""
    def __init__(self):
        # The base class is abstract; only subclasses may be instantiated.
        raise Exception("creating an instance of RuleOutput base class is forbidden")

    @staticmethod
    def check_conformity(ig, match_token, match_token_length):
        """Validate the arguments of an apply() call, raising TypeError/ValueError on bad input."""
        if not isinstance(ig, Ingredient):
            raise TypeError("Expected ig to be an Ingredient")
        if type(match_token) != tuple:
            raise TypeError("Expected tuple")
        if type(match_token_length) != int:
            raise TypeError("Expected int")
        if len(match_token) != match_token_length:
            raise ValueError("Expected tuple of length " + str(match_token_length))
        if any(type(x) != int for x in match_token):
            raise TypeError("Expected tuple filled with ints")
25
class NoneRuleOutput(RuleOutput):
    """Identity rule output: apply() hands the ingredient back untouched."""
    def __init__(self):
        pass

    @staticmethod
    def apply(ig, match_token):
        # No transformation performed.
        return ig

    @staticmethod
    def priority():
        return 1

# Shared singleton instance.
NoneRuleOutputInstance = NoneRuleOutput()
37
class RenamingRuleOutput(RuleOutput):
    """Rule output that replaces the matched tail of an ingredient's unit or name.

    output_name may be None, in which case apply() is a no-op.
    """
    def __init__(self, output_name):
        if output_name is not None:
            listOfStrings(output_name)
        self.output_name = output_name

    def apply(self, ig, match_token):
        if self.output_name is None:
            return ig
        # match_token must come from SinglePattern: (field, position).
        self.check_conformity(ig, match_token, 2)
        cut = max(0, match_token[1])
        result = ig.duplicate()
        if match_token[0] == 1:
            # field 1 -> edit the unit
            result.unit = result.unit[:cut] + self.output_name
        else:
            # any other field -> edit the name
            result.name = result.name[:cut] + self.output_name
        return result

    @staticmethod
    def priority():
        return 2
59
class PrefixingRuleOutput(RuleOutput):
    """Rule output that prepends a prefix to the unit or name, unless one already exists."""
    def __init__(self, prefix):
        listOfStrings(prefix)
        self.prefix = prefix

    def apply(self, ig, match_token):
        # match_token must come from SinglePattern: (field, position).
        self.check_conformity(ig, match_token, 2)
        if match_token[1] > 0:
            # A positive match position means the input already carries a
            # prefix, which we must not change.
            return ig
        result = ig.duplicate()
        if match_token[0] == 1:
            # field 1 -> edit the unit
            result.unit = self.prefix + result.unit
        else:
            # any other field -> edit the name
            result.name = self.prefix + result.name
        return result

    @staticmethod
    def priority():
        return 3
82
class InsertingRuleOutput(RuleOutput):
    """Rule output that splices a replacement over the matched span.

    Replaces pattern_size tokens of the unit or name, starting at the match
    position, with `insertion` (which may be the empty list).
    """
    def __init__(self, pattern_size, insertion):
        if type(pattern_size) != int:
            # Bug fix: error message typo ("Expeected" -> "Expected").
            raise TypeError("Expected int")
        if insertion != []:
            listOfStrings(insertion)
        self.pattern_size = pattern_size
        self.insertion = insertion

    def apply(self, ig, match_token):
        # match_token must come from SinglePattern: (field, position).
        self.check_conformity(ig, match_token, 2)
        output = ig.duplicate()
        # Hoist the splice boundaries instead of recomputing max() four times.
        start = max(0, match_token[1])
        end = start + self.pattern_size
        if match_token[0] == 1:
            # editing unit
            output.unit = output.unit[:start] + self.insertion + output.unit[end:]
        else:
            # editing name
            output.name = output.name[:start] + self.insertion + output.name[end:]
        return output

    @staticmethod
    def priority():
        return 2
105
class SingleConvertingRuleOutput(RenamingRuleOutput):
    """Renaming rule output that also multiplies the ingredient count by a ratio."""
    def __init__(self, ratio, output_name):
        super().__init__(output_name)
        if not isinstance(ratio, MyNumber):
            raise TypeError("Expected MyNumber")
        self.ratio = ratio

    def apply(self, ig, match_token):
        self.check_conformity(ig, match_token, 2)
        # Rename first (base-class behavior), then scale the count.
        renamed = super().apply(ig, match_token).duplicate()
        renamed.count = renamed.count * self.ratio
        return renamed

    @staticmethod
    def priority():
        return 5
120
class DoubleConvertingRuleOutput(RuleOutput):
    """Rule output that can rename the unit, rename the name, and scale the count.

    Each of ratio, output_unit, and output_name may be None to act as a
    wildcard, leaving that aspect of the ingredient unchanged.
    """
    def __init__(self, ratio, output_unit, output_name):
        if ratio is not None and not isinstance(ratio, MyNumber):
            raise TypeError("Expected MyNumber")
        self.ratio = ratio
        # Wrap non-None rename targets in RenamingRuleOutput helpers up front.
        self.output_unit = None if output_unit is None else RenamingRuleOutput(output_unit)
        self.output_name = None if output_name is None else RenamingRuleOutput(output_name)

    def apply(self, ig, match_token):
        # match_token must come from DoublePattern: (field, unit_pos, name_pos).
        self.check_conformity(ig, match_token, 3)
        if self.output_unit is not None:
            ig = self.output_unit.apply(ig, (1, max(0, match_token[1])))
        if self.output_name is not None:
            ig = self.output_name.apply(ig, (2, max(0, match_token[2])))
        if self.ratio is not None:
            ig = ig.duplicate()
            ig.count = ig.count * self.ratio
        return ig

    @staticmethod
    def priority():
        return 5
149
class PropertiesRuleOutput(RuleOutput):
    """Rule output that applies a wrapped rule output, then edits the props set.

    edits is a list of (prop, flag) tuples: flag True adds the prop; flag
    False removes it, with the special prop "$" clearing all props.
    """
    def __init__(self, base, edits):
        if not isinstance(base, RuleOutput):
            raise TypeError("Expected child class of RuleOutput")
        listOfTupleStringBools(edits)
        self.base = base
        self.edits = edits

    def apply(self, ig, match_token):
        output = self.base.apply(ig, match_token).duplicate()
        for prop, add in self.edits:
            if add:
                output.props.add(prop)
            elif prop == "$":
                # "$" removes every property.
                output.props = set()
            else:
                # set.discard is a no-op when absent; replaces the
                # membership-check-then-remove (LBYL) of the original.
                output.props.discard(prop)
        return output

    def priority(self):
        # Instance method by necessity: delegates to the wrapped rule output.
        return self.base.priority()
172
class DecRuleOutput(RuleOutput):
    """Rule output converting the ingredient count to decimal (float) form."""
    def __init__(self):
        pass

    @staticmethod
    def apply(ig, match_token):
        result = ig.duplicate()
        result.count = result.count.as_float()
        return result

    @staticmethod
    def priority():
        return 1

# Shared singleton instance.
DecRuleOutputInstance = DecRuleOutput()
186
class FracRuleOutput(RuleOutput):
    """Rule output converting the ingredient count to fractional form."""
    def __init__(self):
        pass

    @staticmethod
    def apply(ig, match_token):
        result = ig.duplicate()
        # 10 is presumably a denominator limit for as_fraction — confirm in MyNumber.
        result.count = result.count.as_fraction(10)
        return result

    @staticmethod
    def priority():
        return 1

# Shared singleton instance.
FracRuleOutputInstance = FracRuleOutput()
Full Screen

statement.py

Source: statement.py Github

copy
1""" Parser logic that parses statement nodes """
2
3from myparser.utils import (add_range, log_error, match_token, token_is, ParserError)
4from myparser.declaration import parse_declaration
5from myparser.expression import parse_expression
6import myparser.utils as p
7import tree.tree as nodes
8import token_kinds
9
10
@add_range
def parse_statement(index):
    """ Parse a statement. Try each possible type of statement, catching/logging exceptions upon parse failures.
    On the last try, raise the exception on to the caller.
    """
    # log_error() presumably suppresses and records a parse error raised in
    # its body (see myparser.utils — TODO confirm): a successful parse
    # returns from inside the with-block, a failed one falls through to the
    # next candidate. parse_expr_statement is tried last, outside any
    # suppression, so its error propagates to the caller.
    for func in (parse_compound_statement, parse_return, parse_break, parse_continue, parse_if_statement,
                 parse_while_statement, parse_do_while_statement, parse_for_statement):
        with log_error():
            return func(index)

    return parse_expr_statement(index)
22
23
@add_range
def parse_compound_statement(index):
    """ Parse a compound statement.
    A compound statement is a collection of several statements/declarations, enclosed in braces.
    """
    # Each brace-enclosed block opens its own symbol-table scope.
    p.symbols.new_scope()
    index = match_token(index, token_kinds.open_brack, ParserError.GOT)

    # Read block items (statements/declarations) until there are no more.
    # log_error() presumably suppresses a parse error raised in its body
    # (TODO confirm in myparser.utils): on success the `continue` restarts
    # the loop, on failure control falls through to the next attempt, and
    # when neither a statement nor a declaration parses, `break` ends it.
    items = []
    while True:
        with log_error():
            item, index = parse_statement(index)
            items.append(item)
            continue

        with log_error():
            item, index = parse_declaration(index)
            items.append(item)
            continue

        break

    index = match_token(index, token_kinds.close_brack, ParserError.GOT)
    p.symbols.end_scope()

    return nodes.Compound(items), index
51
52
@add_range
def parse_return(index):
    """Parse a return statement, e.g. `return 5;` or a bare `return;`."""
    index = match_token(index, token_kinds.return_kw, ParserError.GOT)

    # Void return: nothing between `return` and the semicolon.
    if token_is(index, token_kinds.semicolon):
        return nodes.Return(None), index

    expr_node, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Return(expr_node), index
66
67
@add_range
def parse_break(index):
    """Parse a break statement: `break;`."""
    index = match_token(index, token_kinds.break_kw, ParserError.GOT)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Break(), index
74
75
@add_range
def parse_continue(index):
    """Parse a continue statement: `continue;`."""
    index = match_token(index, token_kinds.continue_kw, ParserError.GOT)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Continue(), index
82
83
@add_range
def parse_if_statement(index):
    """Parse an if statement, with an optional trailing else clause."""
    index = match_token(index, token_kinds.if_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    body, index = parse_statement(index)

    # Consume an `else` clause only when one actually follows.
    else_body = None
    if token_is(index, token_kinds.else_kw):
        index = match_token(index, token_kinds.else_kw, ParserError.GOT)
        else_body, index = parse_statement(index)

    return nodes.IfStatement(cond, body, else_body), index
103
104
@add_range
def parse_while_statement(index):
    """Parse a while statement: `while (cond) body`."""
    index = match_token(index, token_kinds.while_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    body, index = parse_statement(index)
    return nodes.WhileStatement(cond, body), index
115
116
@add_range
def parse_do_while_statement(index):
    """Parse a do-while statement: `do body while (cond);`."""
    index = match_token(index, token_kinds.do_kw, ParserError.GOT)
    body, index = parse_statement(index)
    index = match_token(index, token_kinds.while_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.DoWhileStatement(cond, body), index
129
130
@add_range
def parse_for_statement(index):
    """Parse a for statement: `for (init; cond; incr) body`."""
    index = match_token(index, token_kinds.for_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    init, cond, incr, index = get_for_clauses(index)
    body, index = parse_statement(index)
    return nodes.ForStatement(init, cond, incr, body), index
141
142
def get_for_clauses(index):
    """Parse the three clauses of a for-statement header.

        index - index of the first token of the first clause.

        returns - tuple (Node, Node, Node, index); each Node is the
        corresponding clause or None when that clause is empty, and index
        points to the first token after the closing paren.

    Raises an exception on malformed input.
    """
    first, index = get_first_for_clause(index)

    # Second clause ends at a semicolon; empty when the semicolon is immediate.
    second = None
    if token_is(index, token_kinds.semicolon):
        index += 1
    else:
        second, index = parse_expression(index)
        index = match_token(index, token_kinds.semicolon, ParserError.AFTER)

    # Third clause ends at the closing paren; empty when the paren is immediate.
    third = None
    if token_is(index, token_kinds.close_paren):
        index += 1
    else:
        third, index = parse_expression(index)
        index = match_token(index, token_kinds.close_paren, ParserError.AFTER)

    return first, second, third, index
171
172
def get_first_for_clause(index):
    """Get the first clause of a for-statement.

        index - Index of the beginning of the first clause in the for-statement.

        returns - Tuple. First element is a node if a clause is found and None if there is no clause (i.e. semicolon
        terminating the clause). Second element is an integer index where the next token begins.

    If malformed, raises exception.
    """
    # Empty clause: just the terminating semicolon.
    if token_is(index, token_kinds.semicolon): return None, index + 1

    # Try a declaration first; log_error presumably swallows the parse error
    # on failure (TODO confirm in myparser.utils) so we can fall back to
    # parsing an expression clause below.
    with log_error():
        return parse_declaration(index)

    clause, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return clause, index
191
192
@add_range
def parse_expr_statement(index):
    """Parse an expression statement, e.g. `a = 3 + 4;` (a lone `;` is an empty statement)."""
    if token_is(index, token_kinds.semicolon):
        # Empty statement: nothing before the semicolon.
        return nodes.EmptyStatement(), index + 1

    expr, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.ExprStatement(expr), index
204
Full Screen

Parser.py

Source: Parser.py Github

copy
1from utils import Token_Type
2from utils import ExprNode
3from utils import syntax_error
4from Lexer import Lexer
5from utils import set_param, get_param, change_param
6import math
7import matplotlib.pyplot as plt
8
# global
# Transformation state shared by the drawing routines: translation origin,
# rotation angle (passed to math.cos/math.sin, so radians), per-axis scale
# factors, and the current pen color name.
Origin_x = 0.0
Origin_y = 0.0
Rot_ang = 0.0
Scale_x = 1
Scale_y = 1
Color = 'BLACK'
16
17
def set_origin(x, y):
    """Set the global translation origin to (x, y)."""
    global Origin_x, Origin_y
    Origin_x, Origin_y = x, y
    print("^^^^^^^^^^^Set Origin^^^^^^^^^^^^")
    print(str(Origin_x) + ' ' + str(Origin_y))
24
25
def set_scale(x, y):
    """Set the global per-axis scale factors."""
    global Scale_x, Scale_y
    Scale_x, Scale_y = x, y
    print("^^^^^^^^^^^Set Scale^^^^^^^^^^^^")
    print(str(Scale_x) + ' ' + str(Scale_y))
32
33
def set_rot(x):
    """Set the global rotation angle (radians, per cal_coord's use of cos/sin)."""
    global Rot_ang
    Rot_ang = x
    print("^^^^^^^^^^^Set Rot^^^^^^^^^^^^")
    print(str(Rot_ang))
39
40
def set_color(x):
    """Set the current pen color name ('RED'/'GREEN'/'BLUE'; anything else plots black)."""
    global Color
    Color = x
    print("^^^^^^^^^^^Set Color^^^^^^^^^^^^")
    print(Color)
46
47
# get the value of expression
# dfs
def get_expr_value(root):
    """Recursively evaluate an ExprNode tree; returns 0.0 for None or unknown node types."""
    if root is None:
        return 0.0
    kind = root.type
    if kind == Token_Type.PLUS.name:
        return float(get_expr_value(root.left)) + float(get_expr_value(root.right))
    if kind == Token_Type.MINUS.name:
        return float(get_expr_value(root.left)) - float(get_expr_value(root.right))
    if kind == Token_Type.MUL.name:
        return float(get_expr_value(root.left)) * float(get_expr_value(root.right))
    if kind == Token_Type.DIV.name:
        return float(get_expr_value(root.left)) / float(get_expr_value(root.right))
    if kind == Token_Type.FUNC.name:
        # Unary function node: apply its stored callable to the single operand.
        return float(root.func(get_expr_value(root.left)))
    if kind == Token_Type.CONST_ID.name:
        return float(root.value)
    if kind == Token_Type.T.name:
        # Current value of the loop parameter T.
        return float(root.get_param())
    return 0.0
68
69
# calculate (x, y) after transformation
def cal_coord(x_ptr, y_ptr):
    """Evaluate the coordinate expression trees and apply scale, rotation, then translation."""
    global Origin_x, Origin_y, Scale_x, Scale_y
    # scaling
    x = get_expr_value(x_ptr) * Scale_x
    y = get_expr_value(y_ptr) * Scale_y
    # rotation (clockwise by Rot_ang)
    rot_x = x * math.cos(Rot_ang) + y * math.sin(Rot_ang)
    rot_y = y * math.cos(Rot_ang) - x * math.sin(Rot_ang)
    # translation
    return rot_x + Origin_x, rot_y + Origin_y
86
87
# draw the pic dot by dot
def draw_loop(start, end, step, x_ptr, y_ptr):
    """Plot the curve point by point as the parameter runs from start to end by step."""
    global Color
    # Map the named colors to matplotlib point styles; anything else is black.
    marker_by_color = {'RED': 'r.', 'GREEN': 'g.', 'BLUE': 'b.'}
    set_param(start)
    while get_param() <= end:
        x, y = cal_coord(x_ptr, y_ptr)
        plt.plot(x, y, marker_by_color.get(Color, 'k.'))
        change_param(step)
103
104
def close_scanner():
    """Placeholder teardown hook for the lexer; only logs."""
    print("Close scanner")
107
108
# print the syntax tree
def print_tree(root):
    """Pre-order print of a syntax tree; no-op for an empty tree."""
    if root is None:
        return
    root.show()
    print("left_child: ")
    print_tree(root.left)
    print("right_child: ")
    print_tree(root.right)
117
118
class Parser:
    """Recursive-descent parser/interpreter for the function-drawing language.

    Statements: ORIGIN IS (e, e); SCALE IS (e, e); ROT IS e;
    COLOR IS <color>; FOR T FROM e TO e STEP e DRAW (e, e);
    Expressions are built into ExprNode trees and evaluated eagerly.
    """

    def __init__(self, filename):
        self.lexer = Lexer(filename)
        self.token = None   # current lookahead token
        self.root = None    # last expression tree root (unused elsewhere here)

    def start(self):
        """Run the lexer and parse/execute the whole program."""
        print("-----Enter Start-----")
        self.lexer.start()
        self.fetch_token()
        self.program()
        close_scanner()
        print("-----Exit Start-----")

    # get one token
    def fetch_token(self):
        """Advance self.token to the next non-comment token; flag bad tokens."""
        print("-----Enter FetchToken-----")
        self.token = self.lexer.gettoken()
        if self.token.type == Token_Type.ERRTOKEN.name:
            syntax_error(1)
        # skip comment
        if self.token.type == Token_Type.COMMENT.name:
            self.fetch_token()
        print("-----Exit FetchToken-----")

    def match_token(self, ob):
        """Check (without advancing) that the current token type equals ob.

        Reports a syntax error and returns False on mismatch; returns True
        on match.
        """
        print("-----Enter MatchToken-----")
        if self.token.type != ob:
            syntax_error(2, sb=self.token.type, ob=ob)
            print("-----Exit MatchToken-----")
            return False
        print("*****MatchToken " + ob + "*****")
        print("-----Exit MatchToken-----")
        return True

    def program(self):
        """program -> { statement ';' }"""
        print("-----Enter Program-----")
        while self.token.type != Token_Type.NONTOKEN.name:
            self.statement()
            # end with ';'
            self.match_token(Token_Type.SEMICO.name)
            self.fetch_token()
        print("-----Exit Program-----")

    def statement(self):
        """Dispatch on the leading keyword of a statement."""
        print("-----Enter Statement-----")
        if self.token.type == Token_Type.ORIGIN.name:
            self.origin_statement()
        elif self.token.type == Token_Type.SCALE.name:
            self.scale_statement()
        elif self.token.type == Token_Type.ROT.name:
            self.rot_statement()
        elif self.token.type == Token_Type.FOR.name:
            self.for_statement()
        elif self.token.type == Token_Type.COLOR.name:
            self.color_statement()
        else:
            syntax_error(3)
        print("-----Exit Statement-----")

    def origin_statement(self):
        """origin_statement -> ORIGIN IS '(' expression ',' expression ')'"""
        print("-----Enter OriginStatement-----")
        self.match_token(Token_Type.ORIGIN.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(tmp_ptr)
        print("--------------------------------------------------")

        x = get_expr_value(tmp_ptr)

        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(tmp_ptr)
        print("--------------------------------------------------")

        y = get_expr_value(tmp_ptr)

        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()

        set_origin(x, y)

        print("-----Exit OriginStatement-----")

    def scale_statement(self):
        """scale_statement -> SCALE IS '(' expression ',' expression ')'"""
        print("-----Enter ScaleStatement-----")
        self.match_token(Token_Type.SCALE.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(tmp_ptr)
        print("--------------------------------------------------")

        x = get_expr_value(tmp_ptr)

        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(tmp_ptr)
        print("--------------------------------------------------")

        y = get_expr_value(tmp_ptr)

        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()

        set_scale(x, y)

        print("-----Exit ScaleStatement-----")

    def rot_statement(self):
        """rot_statement -> ROT IS expression"""
        print("-----Enter RotStatement-----")
        self.match_token(Token_Type.ROT.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(tmp_ptr)
        print("--------------------------------------------------")

        x = get_expr_value(tmp_ptr)

        set_rot(x)
        print("-----Exit RotStatement-----")

    def for_statement(self):
        """for_statement -> FOR T FROM e TO e STEP e DRAW '(' e ',' e ')'

        The bound/step expressions are evaluated once; the draw expressions
        are kept as trees and re-evaluated per point by draw_loop.
        """
        print("-----Enter ForStatement-----")
        self.match_token(Token_Type.FOR.name)
        self.fetch_token()
        self.match_token(Token_Type.T.name)
        self.fetch_token()
        self.match_token(Token_Type.FROM.name)
        self.fetch_token()
        start_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(start_ptr)
        print("--------------------------------------------------")

        start = get_expr_value(start_ptr)

        self.match_token(Token_Type.TO.name)
        self.fetch_token()
        end_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(end_ptr)
        print("--------------------------------------------------")

        end = get_expr_value(end_ptr)

        self.match_token(Token_Type.STEP.name)
        self.fetch_token()
        step_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(step_ptr)
        print("--------------------------------------------------")

        step = get_expr_value(step_ptr)

        self.match_token(Token_Type.DRAW.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        x_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(x_ptr)
        print("--------------------------------------------------")
        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        y_ptr = self.expression()
        print("--------------------------------------------------")
        print_tree(y_ptr)
        print("--------------------------------------------------")
        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()

        draw_loop(start, end, step, x_ptr, y_ptr)

        print("-----Exit ForStatement-----")

    def color_statement(self):
        """color_statement -> COLOR IS <color name>"""
        print("-----Enter ColorStatement-----")
        self.match_token(Token_Type.COLOR.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.SP_COLOR.name)

        set_color(self.token.lexeme)

        self.fetch_token()
        print("-----Exit ColorStatement-----")

    def expression(self):
        """expression -> term { (PLUS | MINUS) term }"""
        print("-----Enter Expression-----")
        left = self.term()
        while self.token.type == Token_Type.PLUS.name or self.token.type == Token_Type.MINUS.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            # NOTE(review): unlike term()/component(), there is no
            # fetch_token() here, so term() starts on the +/- token itself;
            # factor()'s unary branch then builds an equivalent 0 +/- right
            # subtree. Looks intentional but convoluted — confirm.
            right = self.term()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Expression-----")
        return left

    def term(self):
        """term -> factor { (MUL | DIV) factor }"""
        print("-----Enter Term-----")
        left = self.factor()
        while self.token.type == Token_Type.MUL.name or self.token.type == Token_Type.DIV.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            self.fetch_token()
            right = self.factor()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Term-----")
        return left

    def factor(self):
        """factor -> (PLUS | MINUS) factor | component

        Unary +/- is rewritten as the binary tree (0 +/- operand).
        """
        print("-----Enter Factor-----")
        if self.token.type == Token_Type.PLUS.name or self.token.type == Token_Type.MINUS.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            left = ExprNode(Token_Type.CONST_ID.name, 0)
            self.fetch_token()
            right = self.factor()
            res = ExprNode(token_tmp, lnode=left, rnode=right)
            print("-----Exit Factor-----")
            return res
        else:
            res = self.component()
            print("-----Exit Factor-----")
            return res

    def component(self):
        """component -> atom [ POWER component ]  (right-associative power)"""
        print("-----Enter Component-----")
        left = self.atom()
        self.fetch_token()
        while self.token.type == Token_Type.POWER.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            self.fetch_token()
            right = self.component()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Component-----")
        return left

    def atom(self):
        """atom -> CONST_ID | T | FUNC '(' expression ')' | '(' expression ')'"""
        print("-----Enter Atom-----")
        if self.token.type == Token_Type.CONST_ID.name:
            print("leaf: " + str(self.token.value))
            print("-----Exit Atom-----")
            return ExprNode(self.token.type, self.token.value)  # leaf
        elif self.token.type == Token_Type.T.name:
            print("leaf: " + self.token.type)
            print("-----Exit Atom-----")
            return ExprNode(self.token.type, self.token.value)  # leaf
        elif self.token.type == Token_Type.FUNC.name:
            token_tmp = self.token.type
            func_tmp = self.token.func
            self.fetch_token()
            self.match_token(Token_Type.L_BRACKET.name)
            self.fetch_token()
            left = self.expression()
            self.match_token(Token_Type.R_BRACKET.name)
            print("-----Exit Atom-----")
            return ExprNode(token_tmp, lnode=left, func=func_tmp)
        elif self.token.type == Token_Type.L_BRACKET.name:
            # Bug fix: token.type is a string, so compare against the enum
            # member's .name (as everywhere else). The original compared
            # against Token_Type.L_BRACKET itself, which is never equal, so
            # parenthesized atoms fell through and returned None.
            self.match_token(Token_Type.L_BRACKET.name)
            self.fetch_token()
            left = self.expression()
            self.match_token(Token_Type.R_BRACKET.name)
            print("-----Exit Atom-----")
            return left
405
406
if __name__ == '__main__':
    # init the parser
    p = Parser("test.txt")
    # run the parser (parses the file and plots via draw_loop as it goes)
    p.start()

    # Anchor both axes at 0 and display everything that was plotted.
    plt.xlim(0)
    plt.ylim(0)
    plt.show()
416
Full Screen

Accelerate Your Automation Test Cycles With LambdaTest

Leverage LambdaTest’s cloud-based platform to execute your automation tests in parallel and trim down your test execution time significantly. Your first 100 automation testing minutes are on us.

Try LambdaTest

Run Python Tests on LambdaTest Cloud Grid

Execute automation tests with Gherkin-python on a cloud-based Grid of 3000+ real browsers and operating systems for both web and mobile applications.

Test now for Free
LambdaTestX

We use cookies to give you the best experience. Cookies help to provide a more personalized experience and relevant advertising for you, and web analytics for us. Learn More in our Cookies policy, Privacy & Terms of service

Allow Cookie
Sarah

I hope you find the best code examples for your project.

If you want to accelerate automated browser testing, try LambdaTest. Your first 100 automation testing minutes are FREE.

Sarah Elson (Product & Growth Lead)