How to use the _tokenize method in autotest

Best Python code snippet using autotest_python

test_sentence_lexer.py

Source:test_sentence_lexer.py Github

copy

Full Screen

#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
#
# Unit tests for bes.text.sentence_lexer.  Each test feeds a string to the
# lexer and compares the resulting token stream against expected tokens.
# TSTRING/TSPACE/TCOMMENT/TDONE come from helpers.token_test_helper; each
# takes the token text plus optional x (column) and y (line) positions.

from bes.testing.unit_test import unit_test
from bes.text.sentence_lexer import sentence_lexer as L
from bes.text.lexer_token import lexer_token
from bes.text.string_lexer_options import string_lexer_options
from helpers.token_test_helper import *

def TPUNCT(s, x = 1, y = 1):
  'Shorthand for an expected punctuation token with position (x, y).'
  return lexer_token(L.TOKEN_PUNCTUATION, s, (x, y))

class test_sentence_lexer(unit_test):

  def test_empty_string(self):
    self.assertEqual( [ TDONE() ],
                      self._tokenize(r'') )

  def test_single_char(self):
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize(r'a') )

  def test_one_escape(self):
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize(r'\a') )

  def test_escape_backslash(self):
    self.assertEqual( [ TSTRING(r'\a'), TDONE() ],
                      self._tokenize(r'\\a') )

  def test_eos_when_escaping(self):
    # A lone trailing backslash at end-of-string is dropped.
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize('a\\') )

  def test_simple(self):
    self.maxDiff = None
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TSPACE(), TDONE() ],
                      self._tokenize(r' foo ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TSPACE(), TPUNCT('='), TSPACE(), TSTRING('123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo = 123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TPUNCT('='), TSPACE(), TSTRING('123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo= 123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TPUNCT('='), TSTRING('123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo=123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TPUNCT('='), TSTRING('123'), TDONE() ],
                      self._tokenize(r' foo=123') )
    self.assertEqual( [ TSTRING('foo'), TPUNCT('='), TSTRING('123'), TDONE() ],
                      self._tokenize(r'foo=123') )
    self.assertEqual( [ TSTRING('a'), TSPACE(), TSTRING('b'), TDONE() ],
                      self._tokenize(r'a b') )

  def test_quote(self):
    self.assertEqual( [ TSTRING('a b'), TDONE() ],
                      self._tokenize(r'"a b"') )
    self.assertEqual( [ TSPACE(), TSTRING('foo bar'), TSPACE(), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r' "foo bar" "a b c" ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo bar'), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r' "foo bar""a b c" ') )
    self.assertEqual( [ TSTRING('foo bar'), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r'"foo bar""a b c" ') )
    self.assertEqual( [ TSTRING('foo bar'), TSTRING('a b c'), TDONE() ],
                      self._tokenize(r'"foo bar""a b c"') )

  def test_single_quote_escaped_within_quotes(self):
    self.assertEqual( [ TSTRING('a " b'), TDONE() ],
                      self._tokenize(r'"a \" b"') )
    self.assertEqual( [ TSTRING('a \' b'), TDONE() ],
                      self._tokenize(r'"a \' b"') )
    self.assertEqual( [ TSTRING('a " b'), TDONE() ],
                      self._tokenize(r"'a \" b'") )
    self.assertEqual( [ TSTRING('a \' b'), TDONE() ],
                      self._tokenize(r"'a \' b'") )

  def test_escaped_spaces(self):
    self.assertEqual( [ TSTRING('a b'), TDONE() ],
                      self._tokenize(r'a\ b') )
    self.assertEqual( [ TSTRING('foo'), TPUNCT('='), TSTRING('a b'), TDONE() ],
                      self._tokenize(r'foo=a\ b') )
    self.assertEqual( [ TSTRING('fo o'), TPUNCT('='), TSTRING('a b'), TDONE() ],
                      self._tokenize(r'fo\ o=a\ b') )

  def test_comment(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('1'), TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=1 # hi') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('1'), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=1# hi') )
    self.assertEqual( [ TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'# hi') )
    self.assertEqual( [ TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r' # hi') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a= # hi') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=# hi') )

  def test_quoted_string_inside_string(self):
    # An adjacent quoted section fuses with the preceding string token.
    self.assertEqual( [ TSTRING('af o o'), TDONE() ],
                      self._tokenize(r'a"f o o"') )

  def test_escaped_quote_string_inside_string(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING(r"'foo'"), TDONE() ],
                      self._tokenize(r'a=\'foo\'') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSPACE(), TSTRING(r"'foo'"), TDONE() ],
                      self._tokenize(r'a= \'foo\'') )

  def test_escaped_equal_inside_string(self):
    self.assertEqual( [ TSTRING('a='), TPUNCT('='), TSTRING('b'), TDONE() ],
                      self._tokenize(r'a\==b') )
    self.assertEqual( [ TSTRING('=a'), TPUNCT('='), TSTRING('b'), TDONE() ],
                      self._tokenize(r'\=a=b') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('b='), TDONE() ],
                      self._tokenize(r'a=b\=') )

  def test_escaped_space_inside_string(self):
    self.assertEqual( [ TSTRING('a '), TPUNCT('='), TSTRING('b'), TDONE() ],
                      self._tokenize(r'a\ =b') )
    self.assertEqual( [ TSTRING(' a'), TPUNCT('='), TSTRING('b'), TDONE() ],
                      self._tokenize(r'\ a=b') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('b '), TDONE() ],
                      self._tokenize(r'a=b\ ') )

  def test_new_line(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('foo'), TSPACE(' \n ', y = 2), TSTRING('b', y = 2), TPUNCT('=', y = 2), TSTRING('bar', y = 2), TDONE(y = 2) ],
                      self._tokenize('a=foo \n b=bar') )

  def test_escaped_new_line(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('f\no', y = 2), TDONE(y = 2) ],
                      self._tokenize('a=f\\\no') )

  def test_keep_quotes(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('foo bar baz'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('"foo bar baz"'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"', keep_quotes = True) )

  def test_keep_quotes_escaped(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('foo bar baz'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('\\"foo bar baz\\"'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"', keep_quotes = True, escape_quotes = True) )

  def test_line_numbers(self):
    '''
    1: a=5
    2: b=6
    3: 
    4: c=7
    5:
    '''
    self.assertEqual( [ TSTRING('a'), TPUNCT('='), TSTRING('5'), TSPACE('\n', y = 2), 
                        TSTRING('b', y = 2), TPUNCT('=', y = 2), TSTRING('6', y = 2), TSPACE('\n\n', y = 4),
#                        TSPACE('\n\n', y = 4), 
                        TSTRING('c', y = 4), TPUNCT('=', y = 4), TSTRING('7', y = 4), TSPACE('\n', y = 5), 
                        TDONE(y = 5) ],
                      self._tokenize('a=5\nb=6\n\nc=7\n') )

  def test_punctuation(self):
    self.assertEqual( [ TSTRING('a'), TPUNCT('&'), TSTRING('b'), TDONE() ],
                      self._tokenize('a&b') )
    self.assertEqual( [ TSTRING('a'), TPUNCT('&'), TPUNCT('&'), TSTRING('b'), TDONE() ],
                      self._tokenize('a&&b') )

  def test_underscore(self):
    # Underscore is part of a string token, not punctuation.
    self.assertEqual( [ TSPACE(), TSTRING('foo_bar'), TSPACE(), TDONE() ],
                      self._tokenize(r' foo_bar ') )

  def test_ignore_comments(self):
    self.assertEqual( [ TSTRING('foo'), TSPACE(), TSTRING('#bar'), TDONE() ],
                      self._tokenize(r'foo #bar', ignore_comments = True) )

  @classmethod
  def _tokenize(cls, text,
                keep_quotes = False,
                escape_quotes = False,
                ignore_comments = False,
                ignore_spaces = False):
    '''Tokenize text with sentence_lexer, translating the boolean keyword
    arguments into string_lexer_options bit flags.  Returns a list of tokens.'''
    # Fixed: classmethod first parameter was named "self"; renamed to "cls"
    # per PEP 8 (no caller-visible change).
    options = 0
    if keep_quotes:
      options |= string_lexer_options.KEEP_QUOTES
    if escape_quotes:
      options |= string_lexer_options.ESCAPE_QUOTES
    if ignore_comments:
      options |= string_lexer_options.IGNORE_COMMENTS
    if ignore_spaces:
      options |= string_lexer_options.IGNORE_SPACES
    return list(L.tokenize(text, options = options))

  def assertEqual(self, expected, actual):
    'Compare after normalizing the expected tuples into lexer_token instances.'
    assert isinstance(expected, list)
    expected = [ lexer_token(*t) for t in expected ]
    super(test_sentence_lexer, self).assertEqual(expected, actual)

if __name__ == '__main__':
  unit_test.main()  # NOTE(review): source snippet was truncated here; bes convention assumed — confirm against original file.

Full Screen

Full Screen

test_string_lexer.py

Source:test_string_lexer.py Github

copy

Full Screen

#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
#
# Unit tests for bes.text.string_lexer.  Unlike sentence_lexer, '=' is a
# delimiter (not punctuation), so 'foo=123' stays one string token.
# TSTRING/TSPACE/TCOMMENT/TDONE come from helpers.token_test_helper.

import unittest
from bes.text.lexer_token import lexer_token
from bes.text.string_lexer import string_lexer as L
from bes.text.string_lexer_options import string_lexer_options
from helpers.token_test_helper import *

class test_string_lexer(unittest.TestCase):

  def test_empty_string(self):
    self.assertEqual( [ TDONE() ],
                      self._tokenize(r'') )

  def test_single_char(self):
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize(r'a') )

  def test_one_escape(self):
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize(r'\a') )

  def test_escape_backslash(self):
    self.assertEqual( [ TSTRING(r'\a'), TDONE() ],
                      self._tokenize(r'\\a') )

  def test_eos_when_escaping(self):
    # A lone trailing backslash at end-of-string is dropped.
    self.assertEqual( [ TSTRING('a'), TDONE() ],
                      self._tokenize('a\\') )

  def test_simple(self):
    self.maxDiff = None
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TSPACE(), TDONE() ],
                      self._tokenize(r' foo ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo'), TSPACE(), TSTRING('='), TSPACE(), TSTRING('123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo = 123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo='), TSPACE(), TSTRING('123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo= 123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo=123'), TSPACE(' '), TDONE() ],
                      self._tokenize(r' foo=123 ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo=123'), TDONE() ],
                      self._tokenize(r' foo=123') )
    self.assertEqual( [ TSTRING('foo=123'), TDONE() ],
                      self._tokenize(r'foo=123') )
    self.assertEqual( [ TSTRING('a'), TSPACE(), TSTRING('b'), TDONE() ],
                      self._tokenize(r'a b') )

  def test_quote(self):
    self.assertEqual( [ TSTRING('a b'), TDONE() ],
                      self._tokenize(r'"a b"') )
    self.assertEqual( [ TSPACE(), TSTRING('foo bar'), TSPACE(), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r' "foo bar" "a b c" ') )
    self.assertEqual( [ TSPACE(), TSTRING('foo bar'), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r' "foo bar""a b c" ') )
    self.assertEqual( [ TSTRING('foo bar'), TSTRING('a b c'), TSPACE(), TDONE() ],
                      self._tokenize(r'"foo bar""a b c" ') )
    self.assertEqual( [ TSTRING('foo bar'), TSTRING('a b c'), TDONE() ],
                      self._tokenize(r'"foo bar""a b c"') )

  def test_single_quote_escaped_within_quotes(self):
    self.assertEqual( [ TSTRING('a " b'), TDONE() ],
                      self._tokenize(r'"a \" b"') )
    self.assertEqual( [ TSTRING('a \' b'), TDONE() ],
                      self._tokenize(r'"a \' b"') )
    self.assertEqual( [ TSTRING('a " b'), TDONE() ],
                      self._tokenize(r"'a \" b'") )
    self.assertEqual( [ TSTRING('a \' b'), TDONE() ],
                      self._tokenize(r"'a \' b'") )

  def test_escaped_spaces(self):
    self.assertEqual( [ TSTRING('a b'), TDONE() ],
                      self._tokenize(r'a\ b') )
    self.assertEqual( [ TSTRING('foo=a b'), TDONE() ],
                      self._tokenize(r'foo=a\ b') )
    self.assertEqual( [ TSTRING('fo o=a b'), TDONE() ],
                      self._tokenize(r'fo\ o=a\ b') )

  def test_comment(self):
    self.assertEqual( [ TSTRING('a=1'), TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=1 # hi') )
    self.assertEqual( [ TSTRING('a=1'), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=1# hi') )
    self.assertEqual( [ TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'# hi') )
    self.assertEqual( [ TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r' # hi') )
    self.assertEqual( [ TSTRING('a='), TSPACE(), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a= # hi') )
    self.assertEqual( [ TSTRING('a='), TCOMMENT('# hi'), TDONE() ],
                      self._tokenize(r'a=# hi') )

  def test_quoted_string_inside_string(self):
    # An adjacent quoted section fuses with the preceding string token.
    self.assertEqual( [ TSTRING('af o o'), TDONE() ],
                      self._tokenize(r'a"f o o"') )

  def test_escaped_quote_string_inside_string(self):
    self.assertEqual( [ TSTRING(r"a='foo'"), TDONE() ],
                      self._tokenize(r'a=\'foo\'') )
    self.assertEqual( [ TSTRING('a='), TSPACE(), TSTRING(r"'foo'"), TDONE() ],
                      self._tokenize(r'a= \'foo\'') )

  def test_escaped_equal_inside_string(self):
    self.assertEqual( [ TSTRING('a==b'), TDONE() ],
                      self._tokenize(r'a\==b') )
    self.assertEqual( [ TSTRING('=a=b'), TDONE() ],
                      self._tokenize(r'\=a=b') )
    self.assertEqual( [ TSTRING('a=b='), TDONE() ],
                      self._tokenize(r'a=b\=') )

  def test_escaped_space_inside_string(self):
    self.assertEqual( [ TSTRING('a =b'), TDONE() ],
                      self._tokenize(r'a\ =b') )
    self.assertEqual( [ TSTRING(' a=b'), TDONE() ],
                      self._tokenize(r'\ a=b') )
    self.assertEqual( [ TSTRING('a=b '), TDONE() ],
                      self._tokenize(r'a=b\ ') )

  # NOTE: "x" prefix disables this test; kept as-is from the original.
  def xtest_delimiter_is_none(self):
    self.assertEqual( [ TSTRING('a =b'), TDONE() ],
                      self._tokenize(r'a\ =b', delimiter = None) )
    self.assertEqual( [ TSTRING('a=b'), TDONE() ],
                      self._tokenize(r'a=b', delimiter = None) )
    self.assertEqual( [ TSTRING('a'), TSPACE(), TSTRING('=b'), TDONE() ],
                      self._tokenize(r'a =b', delimiter = None) )
    self.assertEqual( [ TSTRING('a'), TSPACE(), TSTRING('='), TSPACE(), TSTRING('b'), TDONE() ],
                      self._tokenize(r'a = b', delimiter = None) )

  def test_new_line(self):
    self.assertEqual( [ TSTRING('a=foo'), TSPACE(' \n ', y = 2), TSTRING('b=bar', y = 2), TDONE(y = 2) ],
                      self._tokenize('a=foo \n b=bar') )

  def test_escaped_new_line(self):
    self.assertEqual( [ TSTRING('a=f\no', y = 2), TDONE(y = 2) ],
                      self._tokenize('a=f\\\no') )

  def test_keep_quotes(self):
    self.assertEqual( [ TSTRING('a=foo bar baz'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"') )
    self.assertEqual( [ TSTRING('a="foo bar baz"'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"', keep_quotes = True) )

  def test_keep_quotes_escaped(self):
    self.assertEqual( [ TSTRING('a=foo bar baz'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"') )
    self.assertEqual( [ TSTRING('a=\\"foo bar baz\\"'), TDONE() ],
                      self._tokenize(r'a="foo bar baz"', keep_quotes = True, escape_quotes = True) )

  def test_line_numbers(self):
    '''
    1: a=5
    2: b=6
    3: 
    4: c=7
    5:
    '''
    self.assertEqual( [ TSTRING('a=5'), TSPACE('\n', y = 2), 
                        TSTRING('b=6', y = 2), TSPACE('\n\n', y = 4),
                        TSTRING('c=7', y = 4), TSPACE('\n', y = 5), 
                        TDONE(y = 5) ],
                      self._tokenize('a=5\nb=6\n\nc=7\n') )

  def test_ignore_comments(self):
    self.assertEqual( [ TSTRING('foo'), TSPACE(), TSTRING('#bar'), TDONE() ],
                      self._tokenize('foo #bar', ignore_comments = True) )

  @classmethod
  def _tokenize(cls, text, delimiter = '=',
                keep_quotes = False,
                escape_quotes = False,
                ignore_comments = False):
    '''Tokenize text with string_lexer using the given delimiter, translating
    the boolean keyword arguments into string_lexer_options bit flags.'''
    # Fixed: classmethod first parameter was named "self"; renamed to "cls"
    # per PEP 8 (no caller-visible change).
    options = 0
    if keep_quotes:
      options |= string_lexer_options.KEEP_QUOTES
    if escape_quotes:
      options |= string_lexer_options.ESCAPE_QUOTES
    if ignore_comments:
      options |= string_lexer_options.IGNORE_COMMENTS
    return list(L.tokenize(text, delimiter, options = options))

  def assertEqual(self, expected, actual):
    'Compare after normalizing the expected tuples into lexer_token instances.'
    assert isinstance(expected, list)
    expected = [ lexer_token(*t) for t in expected ]
    super(test_string_lexer, self).assertEqual(expected, actual)

if __name__ == "__main__":
  unittest.main()  # NOTE(review): source snippet was truncated here; standard unittest entry point assumed — confirm against original file.

Full Screen

Full Screen

testparse.py

Source:testparse.py Github

copy

Full Screen

# Unit tests for parsecorrespondance.parse.FrenchMapping: tokenizing and
# parsing LVF/LADL class-correspondence expressions ('ou'/'et' operators,
# parentheses, and bracketed column restrictions).
import unittest

from parsecorrespondance import parse


class TestParsingFunctions(unittest.TestCase):

    def test_tokenize(self):
        self.assertEqual(parse.FrenchMapping._tokenize('LVF', 'L3b'), ['L3b'])
        self.assertEqual(parse.FrenchMapping._tokenize('LVF', 'L3b ou L3c'), ['L3b', 'or', 'L3c'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LVF', 'L3b ou(L3c)'),
            ['L3b', 'or', '(', 'L3c', ')'])
        # Unbalanced parentheses are tokenized as-is (balance is a parser concern).
        self.assertEqual(parse.FrenchMapping._tokenize('LVF', '(L3b'), ['(', 'L3b'])
        self.assertEqual(parse.FrenchMapping._tokenize('LADL', '(37M2 ou 37M3 ou 37M4) et 32A)'), ['(', '37M2', 'or', '37M3', 'or', '37M4', ')', 'and', '32A', ')'])

    def test_nothing(self):
        # Placeholder markers parse to an empty infix expression.
        self.assertEqual(parse.FrenchMapping('LADL', '-').infix(), '')
        self.assertEqual(parse.FrenchMapping('LADL', '?').infix(), '')
        self.assertEqual(parse.FrenchMapping('LADL', '').infix(), '')
        self.assertEqual(parse.FrenchMapping('LVF', '*').infix(), '')

    def test_syntaxerror(self):
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LVF', 'C ou L3b et X4a')

    def test_fakename(self):
        with self.assertRaises(parse.UnknownClassException):
            parse.FrenchMapping('LVF', 'L6d')
        with self.assertRaises(parse.UnknownClassException):
            parse.FrenchMapping('LADL', '38LJ')

    def test_simple(self):
        self.assertEqual(parse.FrenchMapping('LVF', 'L3b').infix(), 'L3b')
        self.assertEqual(parse.FrenchMapping('LADL', '38LD').infix(), '38LD')

    def test_operator(self):
        self.assertEqual(parse.FrenchMapping('LVF', 'L3b ou X4a.2').infix(), '(or L3b X4a.2)')
        self.assertEqual(parse.FrenchMapping('LADL', '37M2 et 32A').infix(), '(and 37M2 32A)')

    def test_missing_operator(self):
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LVF', 'S3c ou S3b S3a')

    def test_nestedoperators(self):
        self.assertEqual(
            parse.FrenchMapping('LVF', 'L3b ou (X4a.2 et X4a.1)').infix(),
            '(or L3b (and X4a.2 X4a.1))')
        self.assertEqual(
            parse.FrenchMapping('LADL', '32A et (37M2 ou 37M3 ou 37M4)').infix(),
            '(and 32A (or 37M2 37M3 37M4))')
        self.assertEqual(
            parse.FrenchMapping('LADL', '(37M2 ou 37M3 ou 37M4) et 32A)').infix(),
            '(and (or 37M2 37M3 37M4) 32A)')

    def test_flatparse(self):
        self.assertEqual(
            parse.FrenchMapping('LADL', '32A et (37M2 ou 37M3)').flat_parse(),
            [('32A', '32A'), ('et', None), ('(', None), ('37M2', '37M2'),
             ('ou', None), ('37M3', '37M3'), (')', None)])
        self.assertEqual(
            parse.FrenchMapping('LADL', '32C[+N1 être Vpp W]').flat_parse(),
            [('32C[+N1 être Vpp W]', '32C')])


class TestLVFColumns(unittest.TestCase):

    def test_parse(self):
        self.assertEqual(
            parse.FrenchMapping('LVF', 'L3b[+T1300]').parse_tree,
            {'leaf': ('L3b', [None, {'column': 'T1300', 'value': '+'}])})
        self.assertEqual(
            parse.FrenchMapping('LVF', 'P1i.2[-T14b0]').parse_tree,
            {'leaf': ('P1i.2', [None, {'column': 'T14b0', 'value': '-'}])})


class TestLADLColumns(unittest.TestCase):

    def test_tokenize(self):
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38L[+N1 V W]'),
            ['38L', '[', '+', 'N1 V W', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38L[-N1 V W]'),
            ['38L', '[', '-', 'N1 V W', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '36DT[+N2 détrimentaire]'),
            ['36DT', '[', '+', 'N2 détrimentaire', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38[+inexistant] et 22'),
            ['38', '[', '+', 'inexistant', ']', 'and', '22'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38L[+V-n transport (forme V-n)]'),
            ['38L', '[', '+', 'V-n transport (forme V-n)', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38L[+V-n transport (forme V-n) et +N0 V]'),
            ['38L', '[', '+', 'V-n transport (forme V-n)', 'and', '+', 'N0 V', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '35L[+[extrap]]'),
            ['35L', '[', '+', '[extrap]', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38R[Prép2=par]'),
            ['38R', '[', '=', 'Prép2', 'par', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '15[Prép2=auprès de]'),
            ['15', '[', '=', 'Prép2', 'auprès de', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '15[Prép2=auprès de et +N0 V]'),
            ['15', '[', '=', 'Prép2', 'auprès de', 'and', '+', 'N0 V', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '38R[Prép2=par et +N0 V]'),
            ['38R', '[', '=', 'Prép2', 'par', 'and', '+', 'N0 V', ']'])
        # Fixed: the original calls below each ended with a stray trailing
        # comma, wrapping the discarded result in a throwaway 1-tuple; the
        # commas were removed (behavior-neutral cleanup).
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping._tokenize('LADL', '38L[+V-n transport ou -N et +N0 V]')
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping._tokenize('LADL', '38L[+V-n transport ou N]')
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping._tokenize('LADL', '38R[Prép2 = par]')
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping._tokenize('LADL', '38R[Prép2= par]')
            # NOTE(review): this second call is unreachable when the first one
            # raises; it likely deserves its own assertRaises block — confirm
            # intent before changing.
            parse.FrenchMapping._tokenize('LADL', '38R[Prép2 =par]')

    def test_operator_in_column_name(self):
        # The only two cases containing an 'or'
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '9[+N2 =: si P ou si P]'),
            ['9', '[', '+', 'N2 =: si P ou si P', ']'])
        self.assertEqual(
            parse.FrenchMapping._tokenize('LADL', '15[-N1 =: si P ou si P]'),
            ['15', '[', '-', 'N1 =: si P ou si P', ']'])

    def test_infix(self):
        self.assertEqual(parse.FrenchMapping('LADL', '38LD[+A ou +B]').infix(), '38LD[+A ou +B]')

    def test_parse(self):
        self.assertEqual(
            parse.FrenchMapping('LADL', '36DT[+N2 détrimentaire]').parse_tree,
            {'leaf': ('36DT', [None, {'column': 'N2 détrimentaire', 'value': '+'}])})
        self.assertEqual(
            parse.FrenchMapping('LADL', '36DT[+N2 détrimentaire ou -N2 être V-n]').parse_tree,
            {'leaf': ('36DT', ['or', {'column': 'N2 détrimentaire', 'value': '+'}, {'column': 'N2 être V-n', 'value': '-'}]), })
        self.assertEqual(
            parse.FrenchMapping('LADL', '38R[Prép2=par]').parse_tree,
            {'leaf': ('38R', [None, {'column': 'Prép2', 'value': 'par'}]), })

    def test_flatparse(self):
        self.assertEqual(
            parse.FrenchMapping('LADL', '36DT[+N2 détrimentaire et -Ppv =: y]').flat_parse(),
            [('36DT[+N2 détrimentaire et -Ppv =: y]', '36DT')])

    def test_syntaxerror(self):
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LADL', '38L [+N1 V W]')  # extra space
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LADL', '38L [-N1 V W]')  # extra space
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LADL', '38L[N1 V W]')  # missing plus or minus
        with self.assertRaises(parse.SyntaxErrorException):
            parse.FrenchMapping('LADL', '38L[N1 V V]')  # unknown column
        with self.assertRaises(parse.SyntaxErrorException):
            ...  # NOTE(review): source snippet truncated here — the original body of this final case is missing.

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful