How to use process_tokens method in Kiwi

Best Python code snippet using Kiwi_python

unittest_checker_format.py

Source: unittest_checker_format.py (GitHub)

copy

Full Screen

...98 )99 with self.assertNoMessages():100 self.visitFirst(tree)101 def visitFirst(self, tree):102 self.checker.process_tokens([])103 self.checker.visit_default(tree.body[0])104 def test_ellipsis_is_ignored(self):105 code = """106 from typing import overload107 @overload108 def concat2(arg1: str) -> str: ...109 """110 tree = astroid.extract_node(code)111 with self.assertNoMessages():112 self.visitFirst(tree)113 code = """114 def concat2(arg1: str) -> str: ...115 """116 stmt = astroid.extract_node(code)117 with self.assertAddsMessages(Message("multiple-statements", node=stmt.body[0])):118 self.visitFirst(stmt)119class TestSuperfluousParentheses(CheckerTestCase):120 CHECKER_CLASS = FormatChecker121 def testCheckKeywordParensHandlesValidCases(self):122 self.checker._keywords_with_parens = set()123 cases = [124 "if foo:",125 "if foo():",126 "if (x and y) or z:",127 "assert foo()",128 "assert ()",129 "if (1, 2) in (3, 4):",130 "if (a or b) in c:",131 "return (x for x in x)",132 "if (x for x in x):",133 "for x in (x for x in x):",134 "not (foo or bar)",135 "not (foo or bar) and baz",136 ]137 with self.assertNoMessages():138 for code in cases:139 self.checker._check_keyword_parentheses(_tokenize_str(code), 0)140 def testCheckKeywordParensHandlesUnnecessaryParens(self):141 self.checker._keywords_with_parens = set()142 cases = [143 (Message("superfluous-parens", line=1, args="if"), "if (foo):", 0),144 (Message("superfluous-parens", line=1, args="if"), "if ((foo, bar)):", 0),145 (Message("superfluous-parens", line=1, args="if"), "if (foo(bar)):", 0),146 (147 Message("superfluous-parens", line=1, args="return"),148 "return ((x for x in x))",149 0,150 ),151 (Message("superfluous-parens", line=1, args="not"), "not (foo)", 0),152 (Message("superfluous-parens", line=1, args="not"), "if not (foo):", 1),153 (Message("superfluous-parens", line=1, args="if"), "if (not (foo)):", 0),154 (Message("superfluous-parens", line=1, args="not"), "if (not (foo)):", 2),155 (156 
Message("superfluous-parens", line=1, args="for"),157 "for (x) in (1, 2, 3):",158 0,159 ),160 (161 Message("superfluous-parens", line=1, args="if"),162 "if (1) in (1, 2, 3):",163 0,164 ),165 ]166 for msg, code, offset in cases:167 with self.assertAddsMessages(msg):168 self.checker._check_keyword_parentheses(_tokenize_str(code), offset)169 def testCheckIfArgsAreNotUnicode(self):170 self.checker._keywords_with_parens = set()171 cases = [("if (foo):", 0), ("assert (1 == 1)", 0)]172 for code, offset in cases:173 self.checker._check_keyword_parentheses(_tokenize_str(code), offset)174 got = self.linter.release_messages()175 assert isinstance(got[-1].args, str)176 def testFuturePrintStatementWithoutParensWarning(self):177 code = """from __future__ import print_function178print('Hello world!')179"""180 tree = astroid.parse(code)181 with self.assertNoMessages():182 self.checker.process_module(tree)183 self.checker.process_tokens(_tokenize_str(code))184 def testKeywordParensFalsePositive(self):185 self.checker._keywords_with_parens = set()186 code = "if 'bar' in (DICT or {}):"187 with self.assertNoMessages():188 self.checker._check_keyword_parentheses(_tokenize_str(code), start=2)189class TestCheckSpace(CheckerTestCase):190 CHECKER_CLASS = FormatChecker191 def testParenthesesGood(self):192 good_cases = ["(a)\n", "(a * (b + c))\n", "(#\n a)\n"]193 with self.assertNoMessages():194 for code in good_cases:195 self.checker.process_tokens(_tokenize_str(code))196 def testParenthesesBad(self):197 with self.assertAddsMessages(198 Message(199 "bad-whitespace",200 line=1,201 args=("No", "allowed", "after", "bracket", "( a)\n^"),202 )203 ):204 self.checker.process_tokens(_tokenize_str("( a)\n"))205 with self.assertAddsMessages(206 Message(207 "bad-whitespace",208 line=1,209 args=("No", "allowed", "before", "bracket", "(a )\n ^"),210 )211 ):212 self.checker.process_tokens(_tokenize_str("(a )\n"))213 with self.assertAddsMessages(214 Message(215 "bad-whitespace",216 line=1,217 args=("No", 
"allowed", "before", "bracket", "foo (a)\n ^"),218 )219 ):220 self.checker.process_tokens(_tokenize_str("foo (a)\n"))221 with self.assertAddsMessages(222 Message(223 "bad-whitespace",224 line=1,225 args=("No", "allowed", "before", "bracket", "{1: 2} [1]\n ^"),226 )227 ):228 self.checker.process_tokens(_tokenize_str("{1: 2} [1]\n"))229 def testTrailingCommaGood(self):230 with self.assertNoMessages():231 self.checker.process_tokens(_tokenize_str("(a, )\n"))232 self.checker.process_tokens(_tokenize_str("(a,)\n"))233 self.checker.config.no_space_check = []234 with self.assertNoMessages():235 self.checker.process_tokens(_tokenize_str("(a,)\n"))236 @set_config(no_space_check=[])237 def testTrailingCommaBad(self):238 with self.assertAddsMessages(239 Message(240 "bad-whitespace",241 line=1,242 args=("No", "allowed", "before", "bracket", "(a, )\n ^"),243 )244 ):245 self.checker.process_tokens(_tokenize_str("(a, )\n"))246 def testComma(self):247 with self.assertAddsMessages(248 Message(249 "bad-whitespace",250 line=1,251 args=("No", "allowed", "before", "comma", "(a , b)\n ^"),252 )253 ):254 self.checker.process_tokens(_tokenize_str("(a , b)\n"))255 def testSpacesAllowedInsideSlices(self):256 good_cases = ["[a:b]\n", "[a : b]\n", "[a : ]\n", "[:a]\n", "[:]\n", "[::]\n"]257 with self.assertNoMessages():258 for code in good_cases:259 self.checker.process_tokens(_tokenize_str(code))260 def testKeywordSpacingGood(self):261 with self.assertNoMessages():262 self.checker.process_tokens(_tokenize_str("foo(foo=bar)\n"))263 self.checker.process_tokens(_tokenize_str("foo(foo: int = bar)\n"))264 self.checker.process_tokens(265 _tokenize_str("foo(foo: module.classname = bar)\n")266 )267 self.checker.process_tokens(268 _tokenize_str("foo(foo: Dict[int, str] = bar)\n")269 )270 self.checker.process_tokens(_tokenize_str("foo(foo: 'int' = bar)\n"))271 self.checker.process_tokens(272 _tokenize_str("foo(foo: Dict[int, 'str'] = bar)\n")273 )274 self.checker.process_tokens(_tokenize_str("lambda 
x=1: x\n"))275 def testKeywordSpacingBad(self):276 with self.assertAddsMessages(277 Message(278 "bad-whitespace",279 line=1,280 args=(281 "No",282 "allowed",283 "before",284 "keyword argument assignment",285 "(foo =bar)\n ^",286 ),287 )288 ):289 self.checker.process_tokens(_tokenize_str("(foo =bar)\n"))290 with self.assertAddsMessages(291 Message(292 "bad-whitespace",293 line=1,294 args=(295 "No",296 "allowed",297 "after",298 "keyword argument assignment",299 "(foo= bar)\n ^",300 ),301 )302 ):303 self.checker.process_tokens(_tokenize_str("(foo= bar)\n"))304 with self.assertAddsMessages(305 Message(306 "bad-whitespace",307 line=1,308 args=(309 "No",310 "allowed",311 "around",312 "keyword argument assignment",313 "(foo = bar)\n ^",314 ),315 )316 ):317 self.checker.process_tokens(_tokenize_str("(foo = bar)\n"))318 with self.assertAddsMessages(319 Message(320 "bad-whitespace",321 line=1,322 args=(323 "Exactly one",324 "required",325 "before",326 "keyword argument assignment",327 "(foo: int= bar)\n ^",328 ),329 )330 ):331 self.checker.process_tokens(_tokenize_str("(foo: int= bar)\n"))332 with self.assertAddsMessages(333 Message(334 "bad-whitespace",335 line=1,336 args=(337 "Exactly one",338 "required",339 "after",340 "keyword argument assignment",341 "(foo: int =bar)\n ^",342 ),343 )344 ):345 self.checker.process_tokens(_tokenize_str("(foo: int =bar)\n"))346 with self.assertAddsMessages(347 Message(348 "bad-whitespace",349 line=1,350 args=(351 "Exactly one",352 "required",353 "around",354 "keyword argument assignment",355 "(foo: int=bar)\n ^",356 ),357 )358 ):359 self.checker.process_tokens(_tokenize_str("(foo: int=bar)\n"))360 with self.assertAddsMessages(361 Message(362 "bad-whitespace",363 line=1,364 args=(365 "Exactly one",366 "required",367 "around",368 "keyword argument assignment",369 "(foo: List[int]=bar)\n ^",370 ),371 )372 ):373 self.checker.process_tokens(_tokenize_str("(foo: List[int]=bar)\n"))374 # Regression test for #1831375 with 
self.assertNoMessages():376 self.checker.process_tokens(377 _tokenize_str("(arg: Tuple[\n int, str] = None):\n")378 )379 def testOperatorSpacingGood(self):380 good_cases = ["a = b\n", "a < b\n", "a\n< b\n"]381 with self.assertNoMessages():382 for code in good_cases:383 self.checker.process_tokens(_tokenize_str(code))384 def testOperatorSpacingBad(self):385 with self.assertAddsMessages(386 Message(387 "bad-whitespace",388 line=1,389 args=("Exactly one", "required", "before", "comparison", "a< b\n ^"),390 )391 ):392 self.checker.process_tokens(_tokenize_str("a< b\n"))393 with self.assertAddsMessages(394 Message(395 "bad-whitespace",396 line=1,397 args=("Exactly one", "required", "after", "comparison", "a <b\n ^"),398 )399 ):400 self.checker.process_tokens(_tokenize_str("a <b\n"))401 with self.assertAddsMessages(402 Message(403 "bad-whitespace",404 line=1,405 args=("Exactly one", "required", "around", "comparison", "a<b\n ^"),406 )407 ):408 self.checker.process_tokens(_tokenize_str("a<b\n"))409 with self.assertAddsMessages(410 Message(411 "bad-whitespace",412 line=1,413 args=("Exactly one", "required", "around", "comparison", "a< b\n ^"),414 )415 ):416 self.checker.process_tokens(_tokenize_str("a< b\n"))417 def testValidTypingAnnotationEllipses(self):418 """Make sure ellipses in function typing annotation419 doesn't cause a false positive bad-whitespace message"""420 with self.assertNoMessages():421 self.checker.process_tokens(422 _tokenize_str("def foo(t: Tuple[str, ...] 
= None):\n")423 )424 def testEmptyLines(self):425 self.checker.config.no_space_check = []426 with self.assertAddsMessages(Message("trailing-whitespace", line=2)):427 self.checker.process_tokens(_tokenize_str("a = 1\n \nb = 2\n"))428 with self.assertAddsMessages(Message("trailing-whitespace", line=2)):429 self.checker.process_tokens(_tokenize_str("a = 1\n\t\nb = 2\n"))430 with self.assertAddsMessages(Message("trailing-whitespace", line=2)):431 self.checker.process_tokens(_tokenize_str("a = 1\n\v\nb = 2\n"))432 with self.assertNoMessages():433 self.checker.process_tokens(_tokenize_str("a = 1\n\f\nb = 2\n"))434 self.checker.config.no_space_check = ["empty-line"]435 with self.assertNoMessages():436 self.checker.process_tokens(_tokenize_str("a = 1\n \nb = 2\n"))437 with self.assertNoMessages():438 self.checker.process_tokens(_tokenize_str("a = 1\n\t\nb = 2\n"))439 with self.assertNoMessages():440 self.checker.process_tokens(_tokenize_str("a = 1\n\v\nb = 2\n"))441 def test_encoding_token(self):442 """Make sure the encoding token doesn't change the checker's behavior443 _tokenize_str doesn't produce an encoding token, but444 reading a file does445 """446 with self.assertNoMessages():447 encoding_token = tokenize.TokenInfo(448 tokenize.ENCODING, "utf-8", (0, 0), (0, 0), ""449 )450 tokens = [encoding_token] + _tokenize_str(451 "if (\n None):\n pass\n"452 )453 self.checker.process_tokens(tokens)454def test_disable_global_option_end_of_line():455 """456 Test for issue with disabling tokenizer messages457 that extend beyond the scope of the ast tokens458 """459 file_ = tempfile.NamedTemporaryFile("w", delete=False)460 with file_:461 file_.write(462 """463mylist = [464 None465 ]466 """467 )...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run Kiwi automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation testing minutes FREE!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful