How to use the _process_line method in autotest

Best Python code snippet using autotest_python

lexer_test.py

Source:lexer_test.py Github

copy

Full Screen

# NOTE(review): Scraped excerpt of a lexer_test.py unit-test suite. The original
# editor line numbers (120, 121, ...) are fused into the text and the excerpt is
# elided ("...") at both ends, so the fragment is not runnable as-is; kept
# byte-identical below. The tests exercise the lexer through its private
# _process_line / _tokens API — presumably deliberate white-box testing; confirm
# against the lexer module before relying on that contract.
# Basic token tests below: each feeds one bytes line to _process_line and asserts
# the resulting token count/type; most construct lexer.Lexer(version=4).
...120'''121class TestLexer(unittest.TestCase):122 def testTokenLength(self):123 lxr = lexer.Lexer(version=4)124 lxr._process_line(b'break')125 self.assertEqual(1, len(lxr._tokens))126 self.assertEqual(5, len(lxr._tokens[0]))127 def testTokenRepr(self):128 lxr = lexer.Lexer(version=4)129 lxr._process_line(b'break')130 self.assertEqual(1, len(lxr._tokens))131 self.assertIn('line 0', repr(lxr._tokens[0]))132 133 def testTokenMatches(self):134 lxr = lexer.Lexer(version=4)135 lxr._process_line(b'break')136 self.assertEqual(1, len(lxr._tokens))137 self.assertTrue(lxr._tokens[0].matches(lexer.TokKeyword(b'break')))138 self.assertTrue(lxr._tokens[0].matches(lexer.TokKeyword))139 self.assertFalse(lxr._tokens[0].matches(lexer.TokKeyword(b'and')))140 self.assertFalse(lxr._tokens[0].matches(lexer.TokSpace))141 142 def testWhitespace(self):143 lxr = lexer.Lexer(version=4)144 lxr._process_line(b' \n')145 self.assertEqual(2, len(lxr._tokens))146 self.assertEqual(4, len(lxr._tokens[0]))147 def testOneKeyword(self):148 lxr = lexer.Lexer(version=4)149 lxr._process_line(b'and\n')150 self.assertEqual(2, len(lxr._tokens))151 self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[0])152 def testOneName(self):153 lxr = lexer.Lexer(version=4)154 lxr._process_line(b'android\n')155 self.assertEqual(2, len(lxr._tokens))156 self.assertEqual(lexer.TokName(b'android'), lxr._tokens[0])157 def testQuestionMarkPrint(self):158 lxr = lexer.Lexer(version=4)159 lxr._process_line(b'?\n')160 self.assertEqual(2, len(lxr._tokens))161 self.assertEqual(lexer.TokName(b'?'), lxr._tokens[0])162 def testOneLabel(self):163 lxr = lexer.Lexer(version=4)164 lxr._process_line(b'::foobar::\n')165 self.assertEqual(2, len(lxr._tokens))166 self.assertEqual(lexer.TokLabel(b'::foobar::'), lxr._tokens[0])167 168 def testThreeDots(self):169 lxr = lexer.Lexer(version=4)170 lxr._process_line(b'...\n')171 self.assertEqual(2, len(lxr._tokens))172 self.assertEqual(lexer.TokSymbol(b'...'), lxr._tokens[0])173 def 
# String and comment tests: note that strings spanning lines are fed via
# consecutive _process_line calls (the suite's own TODO remarks that Pico-8
# doesn't allow multiline strings). testStringEscapes asserts that \65 decodes
# to 'A' — a decimal escape; confirm against the lexer's escape handling.
testStringDoubleQuotes(self):174 lxr = lexer.Lexer(version=4)175 lxr._process_line(b'"abc def ghi and jkl"\n')176 self.assertEqual(2, len(lxr._tokens))177 self.assertEqual(lexer.TokString(b'abc def ghi and jkl'),178 lxr._tokens[0])179 def testStringSingleQuotes(self):180 lxr = lexer.Lexer(version=4)181 lxr._process_line(b"'abc def ghi and jkl'\n")182 self.assertEqual(2, len(lxr._tokens))183 self.assertEqual(lexer.TokString(b'abc def ghi and jkl'),184 lxr._tokens[0])185 def testStringMultipleLines(self):186 # TODO: Pico-8 doesn't allow multiline strings, so this probably187 # shouldn't either.188 lxr = lexer.Lexer(version=4)189 lxr._process_line(b'"abc def ghi \n')190 lxr._process_line(b'and jkl"\n')191 self.assertEqual(2, len(lxr._tokens))192 self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'),193 lxr._tokens[0])194 195 def testStringMultipleLinesPlusAToken(self):196 lxr = lexer.Lexer(version=4)197 lxr._process_line(b'"abc def ghi \nand jkl" and\n')198 self.assertEqual(4, len(lxr._tokens))199 self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'),200 lxr._tokens[0])201 self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[2])202 def testStringEscapes(self):203 lxr = lexer.Lexer(version=4)204 lxr._process_line(b'"\\\n\\a\\b\\f\\n\\r\\t\\v\\\\\\"\\\'\\65"\n')205 self.assertEqual(2, len(lxr._tokens))206 self.assertEqual(lexer.TokString(b'\n\a\b\f\n\r\t\v\\"\'A'),207 lxr._tokens[0])208 def testComment(self):209 lxr = lexer.Lexer(version=4)210 lxr._process_line(b'-- comment text and stuff\n')211 self.assertEqual(2, len(lxr._tokens))212 self.assertEqual(lexer.TokComment(b'-- comment text and stuff'),213 lxr._tokens[0])214 def testCommentUnofficialDoubleSlash(self):215 lxr = lexer.Lexer(version=4)216 lxr._process_line(b'// comment text and stuff\n')217 self.assertEqual(2, len(lxr._tokens))218 self.assertEqual(lexer.TokComment(b'// comment text and stuff'),219 lxr._tokens[0])220 def testMultilineComment(self):221 lxr = lexer.Lexer(version=8)222 
# NOTE(review): the multiline-comment tests construct Lexer(version=8) while
# most other tests use version=4 — presumably --[[ ]] comments were added in a
# later cart version; TODO confirm against the lexer's version handling.
lxr._process_line(b'--[[comment text\nand "stuff\n]]\n')223 self.assertEqual(2, len(lxr._tokens))224 self.assertEqual(lexer.TokComment(b'--[[comment text\nand "stuff\n]]'),225 lxr._tokens[0])226 def testMultilineCommentNoLinebreaks(self):227 lxr = lexer.Lexer(version=8)228 lxr._process_line(b'--[[comment text and "stuff]]\n')229 self.assertEqual(2, len(lxr._tokens))230 self.assertEqual(lexer.TokComment(b'--[[comment text and "stuff]]'),231 lxr._tokens[0])232 def testMultilineCommentMultipleCalls(self):233 lxr = lexer.Lexer(version=8)234 lxr._process_line(b'--[[comment text\n')235 lxr._process_line(b'and "stuff\n')236 lxr._process_line(b']]\n')237 self.assertEqual(2, len(lxr._tokens))238 self.assertEqual(lexer.TokComment(b'--[[comment text\nand "stuff\n]]'),239 lxr._tokens[0])240 def testTokenAndComment(self):241 lxr = lexer.Lexer(version=4)242 lxr._process_line(b'and-- comment text and stuff\n')243 self.assertEqual(3, len(lxr._tokens))244 self.assertEqual(lexer.TokKeyword(b'and'),245 lxr._tokens[0])246 self.assertEqual(lexer.TokComment(b'-- comment text and stuff'),247 lxr._tokens[1])248 249 def testNumberInteger(self):250 lxr = lexer.Lexer(version=4)251 lxr._process_line(b'1234567890\n')252 self.assertEqual(2, len(lxr._tokens))253 self.assertEqual(lexer.TokNumber(b'1234567890'),254 lxr._tokens[0])255 def testNumberDecimal(self):256 lxr = lexer.Lexer(version=4)257 lxr._process_line(b'1.234567890\n')258 self.assertEqual(2, len(lxr._tokens))259 self.assertEqual(lexer.TokNumber(b'1.234567890'),260 lxr._tokens[0])261 def testNumberDecimalNoRightPart(self):262 lxr = lexer.Lexer(version=4)263 lxr._process_line(b'1.\n')264 self.assertEqual(2, len(lxr._tokens))265 self.assertEqual(lexer.TokNumber(b'1.'),266 lxr._tokens[0])267 def testNumberDecimalWithExp(self):268 lxr = lexer.Lexer(version=4)269 lxr._process_line(b'1.234567890e-6\n')270 self.assertEqual(2, len(lxr._tokens))271 self.assertEqual(lexer.TokNumber(b'1.234567890e-6'),272 lxr._tokens[0])273 274 def 
# Number tests: TokNumber stores the literal bytes while .value holds the
# parsed numeric value (e.g. 0xae -> 174, 0b01101101 -> 109). testNegatedNumber
# shows '-' lexes as a separate TokSymbol rather than part of the number.
testNegatedNumber(self):275 lxr = lexer.Lexer(version=4)276 lxr._process_line(b'-1.234567890e-6\n')277 self.assertEqual(3, len(lxr._tokens))278 self.assertEqual(lexer.TokSymbol(b'-'),279 lxr._tokens[0])280 self.assertEqual(lexer.TokNumber(b'1.234567890e-6'),281 lxr._tokens[1])282 def testNumberHex(self):283 lxr = lexer.Lexer(version=4)284 lxr._process_line(b'0x1234567890abcdef\n')285 self.assertEqual(2, len(lxr._tokens))286 self.assertEqual(lexer.TokNumber(b'0x1234567890abcdef'),287 lxr._tokens[0])288 def testNumberHexWithFrac(self):289 lxr = lexer.Lexer(version=4)290 lxr._process_line(b'0x1234567890abcdef.1bbf\n')291 self.assertEqual(2, len(lxr._tokens))292 self.assertEqual(lexer.TokNumber(b'0x1234567890abcdef.1bbf'),293 lxr._tokens[0])294 def testNumberBinary(self):295 lxr = lexer.Lexer(version=4)296 lxr._process_line(b'0b01101101\n')297 self.assertEqual(2, len(lxr._tokens))298 self.assertEqual(lexer.TokNumber(b'0b01101101'),299 lxr._tokens[0])300 def testNumberBinaryWithFrac(self):301 lxr = lexer.Lexer(version=4)302 lxr._process_line(b'0b01101101.0011\n')303 self.assertEqual(2, len(lxr._tokens))304 self.assertEqual(lexer.TokNumber(b'0b01101101.0011'),305 lxr._tokens[0])306 def testNumberValueDecimal(self):307 lxr = lexer.Lexer(version=4)308 lxr._process_line(b'123.456\n')309 self.assertEqual(123.456, lxr._tokens[0].value)310 def testNumberValueDecimalNoRightPart(self):311 lxr = lexer.Lexer(version=4)312 lxr._process_line(b'123.\n')313 self.assertEqual(123, lxr._tokens[0].value)314 def testNumberValueDecimalWithE(self):315 lxr = lexer.Lexer(version=4)316 lxr._process_line(b'1.234567890e-6\n')317 self.assertEqual(1.23456789e-6, lxr._tokens[0].value)318 def testNumberValueHexInteger(self):319 lxr = lexer.Lexer(version=4)320 lxr._process_line(b'0xae\n')321 self.assertEqual(174, lxr._tokens[0].value)322 def testNumberValueHexFraction(self):323 lxr = lexer.Lexer(version=4)324 lxr._process_line(b'0xae.bc\n')325 self.assertAlmostEqual(174.734, lxr._tokens[0].value, 
# NOTE(review): 'testNUmberValueBinaryFraction' below has a casing typo
# (NUmber) in the real source — worth fixing upstream; it does not affect test
# discovery. The final method, testProcessLinesErrorOnOpenString, is truncated
# by the scrape ("...") — its body is not visible here.
3)326 def testNumberValueBinaryInteger(self):327 lxr = lexer.Lexer(version=4)328 lxr._process_line(b'0b01101101\n')329 self.assertEqual(109, lxr._tokens[0].value)330 def testNUmberValueBinaryFraction(self):331 lxr = lexer.Lexer(version=4)332 lxr._process_line(b'0b01101101.0011\n')333 self.assertAlmostEqual(109.1875, lxr._tokens[0].value, 3)334 def testMultilineString(self):335 lxr = lexer.Lexer(version=4)336 lxr._process_line(b'[[one\n')337 lxr._process_line(b'"two"\n')338 lxr._process_line(b'[[three]]\n')339 self.assertEqual(2, len(lxr._tokens))340 self.assertEqual(lexer.TokString(b'one\n"two"\n[[three'),341 lxr._tokens[0])342 def testMultilineStringMatchedEquals(self):343 lxr = lexer.Lexer(version=4)344 lxr._process_line(b'[===[one\n')345 lxr._process_line(b'[[two]]\n')346 lxr._process_line(b'[==[three]==]]===]\n')347 self.assertEqual(2, len(lxr._tokens))348 self.assertEqual(lexer.TokString(b'one\n[[two]]\n[==[three]==]'),349 lxr._tokens[0])350 def testValidLuaNoErrors(self):351 lxr = lexer.Lexer(version=4)352 for line in VALID_LUA.split(b'\n'):353 lxr._process_line(line)354 tokens = lxr.tokens355 self.assertEqual(lexer.TokName(b'v1'), tokens[0])356 self.assertEqual(lexer.TokSpace(b' '), tokens[1])357 self.assertEqual(lexer.TokSymbol(b'='), tokens[2])358 self.assertEqual(lexer.TokSpace(b' '), tokens[3])359 self.assertEqual(lexer.TokKeyword(b'nil'), tokens[4])360 def testLexerError(self):361 lxr = lexer.Lexer(version=4)362 try:363 lxr._process_line(b'123 @ 456')364 self.fail()365 except lexer.LexerError as e:366 txt = str(e) # coverage test367 self.assertEqual(1, e.lineno)368 self.assertEqual(5, e.charno)369 def testProcessLines(self):370 lxr = lexer.Lexer(version=4)371 lxr.process_lines([372 b'function foo()\n',373 b' return 999\n',374 b'end\n'375 ])376 self.assertEqual(13, len(lxr._tokens))377 def testProcessLinesErrorOnOpenString(self):...

Full Screen

Full Screen

file_reader.py

Source:file_reader.py Github

copy

Full Screen

# NOTE(review): Scraped excerpt of a file_reader.py class with original editor
# line numbers fused into the text. The enclosing reader method's `def` line is
# elided before this fragment and _combineDict is truncated at the end, so the
# code is kept byte-identical. Review notes on the visible code:
# - The reader builds a results mapping in a local named `dict`, shadowing the
#   builtin — worth renaming (e.g. `results`) in the real source.
# - Each matched header line is reduced to a number via self._process_line.
# - The condition checks 'MAXIMUM ELEC.FIELD' but appends under the key
#   'MAXIMUM ELEC. FIELD' (extra space) — possibly a scrape artifact, possibly a
#   real key inconsistency; TODO confirm against the original file.
...74 for i, line in enumerate(data):75 if '*SLANS*' in line:76 dict['TITLE'].append(line)77 if 'CAVITY RADIUS' in line:78 dict['CAVITY RADIUS'].append(self._process_line(line))79 if 'FREQUENCY' in line:80 dict['FREQUENCY'].append(self._process_line(line))81 if 'LENGTH OF WAVE' in line:82 dict['LENGTH OF WAVE'].append(self._process_line(line))83 if 'WAVE VALUE' in line:84 dict['WAVE VALUE'].append(self._process_line(line))85 if 'QUALITY FACTOR' in line:86 dict['QUALITY FACTOR'].append(self._process_line(line))87 if 'STORED ENERGY' in line:88 dict['STORED ENERGY'].append(self._process_line(line))89 if 'TRANSIT TIME' in line:90 dict['TRANSIT TIME'].append(self._process_line(line))91 if 'EFFECTIVE IMPEDANCE' in line:92 dict['EFFECTIVE IMPEDANCE'].append(self._process_line(line))93 if 'SHUNT IMPEDANCE' in line:94 dict['SHUNT IMPEDANCE'].append(self._process_line(line))95 if 'MAXIMUM MAG. FIELD' in line:96 dict['MAXIMUM MAG. FIELD'].append(self._process_line(line))97 if 'MAXIMUM ELEC.FIELD' in line:98 dict['MAXIMUM ELEC. 
# More review notes on the methods visible below:
# - json_reader: loads a JSON file with pd.read_json and applies `header` as the
#   column names only when its length matches the column count, otherwise it
#   prints a warning and leaves the columns unchanged.
# - _process_line: strips the line, splits on single spaces, and returns the
#   first token that parses as float, else 0. split(' ') (not split()) yields
#   empty strings for runs of spaces — harmless here since float('') raises and
#   the token is skipped — but the bare `except:` also swallows
#   KeyboardInterrupt/SystemExit; `except ValueError:` would be safer upstream.
# - top_reader / pam_reader are unimplemented stubs; _combineDict is truncated
#   by the scrape ("...") and hard-codes three Results/population*.json paths.
FIELD'].append(self._process_line(line))99 if 'ACCELERATION' in line and not 'RATE' in line:100 dict['ACCELERATION'].append(self._process_line(line))101 if 'ACCELERATION RATE' in line:102 dict['ACCELERATION RATE'].append(self._process_line(line))103 if 'AVERAGE E.FIELD ON AXIS' in line:104 dict['AVERAGE E.FIELD ON AXIS'].append(self._process_line(line))105 if 'KM (Emax/Accel.rate)' in line:106 dict['KM (Emax/Accel.rate)'].append(self._process_line(line))107 if 'KH (Hmax*Z0/Accel.rate)' in line:108 dict['KH (Hmax*Z0/Accel.rate)'].append(self._process_line(line))109 return dict110 def top_reader(self):111 pass112 def json_reader(self, dir, header=None):113 df = pd.read_json(dir)114 if header:115 # check if length of header list is same as column length116 if len(header) == len(list(df.columns)):117 df.columns = header118 else:119 print(f'Expected header length of {len(list(df.columns))}, got {len(header)}.')120 return df121 def pam_reader(self):122 pass123 def _process_line(self, line):124 line = line.strip().split(' ')125 res = 0126 for val in line:127 try:128 res = float(val)129 break130 except:131 continue132 return res133 def _combineDict(self, args):134 d1 = json.load(open('Results/population.json', 'r'))135 d2 = json.load(open('Results/population2.json', 'r'))136 d3 = json.load(open('Results/population3.json', 'r'))137 d1.update(d2)...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful