How to use the `at` method in Sure

Best Python code snippet using sure_python

sre_constants.py

Source:sre_constants.py Github

copy

Full Screen

1#2# Secret Labs' Regular Expression Engine3#4# various symbols used by the regular expression engine.5# run this script to update the _sre include files!6#7# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.8#9# See the sre.py file for information on usage and redistribution.10#11"""Internal support module for sre"""12# update when constants are added or removed13MAGIC = 2003101714# max code word in this release15MAXREPEAT = 6553516# SRE standard exception (access as sre.error)17# should this really be here?18class error(Exception):19 pass20# operators21FAILURE = "failure"22SUCCESS = "success"23ANY = "any"24ANY_ALL = "any_all"25ASSERT = "assert"26ASSERT_NOT = "assert_not"27AT = "at"28BIGCHARSET = "bigcharset"29BRANCH = "branch"30CALL = "call"31CATEGORY = "category"32CHARSET = "charset"33GROUPREF = "groupref"34GROUPREF_IGNORE = "groupref_ignore"35GROUPREF_EXISTS = "groupref_exists"36IN = "in"37IN_IGNORE = "in_ignore"38INFO = "info"39JUMP = "jump"40LITERAL = "literal"41LITERAL_IGNORE = "literal_ignore"42MARK = "mark"43MAX_REPEAT = "max_repeat"44MAX_UNTIL = "max_until"45MIN_REPEAT = "min_repeat"46MIN_UNTIL = "min_until"47NEGATE = "negate"48NOT_LITERAL = "not_literal"49NOT_LITERAL_IGNORE = "not_literal_ignore"50RANGE = "range"51REPEAT = "repeat"52REPEAT_ONE = "repeat_one"53SUBPATTERN = "subpattern"54MIN_REPEAT_ONE = "min_repeat_one"55# positions56AT_BEGINNING = "at_beginning"57AT_BEGINNING_LINE = "at_beginning_line"58AT_BEGINNING_STRING = "at_beginning_string"59AT_BOUNDARY = "at_boundary"60AT_NON_BOUNDARY = "at_non_boundary"61AT_END = "at_end"62AT_END_LINE = "at_end_line"63AT_END_STRING = "at_end_string"64AT_LOC_BOUNDARY = "at_loc_boundary"65AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"66AT_UNI_BOUNDARY = "at_uni_boundary"67AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"68# categories69CATEGORY_DIGIT = "category_digit"70CATEGORY_NOT_DIGIT = "category_not_digit"71CATEGORY_SPACE = "category_space"72CATEGORY_NOT_SPACE = "category_not_space"73CATEGORY_WORD = 
"category_word"74CATEGORY_NOT_WORD = "category_not_word"75CATEGORY_LINEBREAK = "category_linebreak"76CATEGORY_NOT_LINEBREAK = "category_not_linebreak"77CATEGORY_LOC_WORD = "category_loc_word"78CATEGORY_LOC_NOT_WORD = "category_loc_not_word"79CATEGORY_UNI_DIGIT = "category_uni_digit"80CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"81CATEGORY_UNI_SPACE = "category_uni_space"82CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"83CATEGORY_UNI_WORD = "category_uni_word"84CATEGORY_UNI_NOT_WORD = "category_uni_not_word"85CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"86CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"87OPCODES = [88 # failure=0 success=1 (just because it looks better that way :-)89 FAILURE, SUCCESS,90 ANY, ANY_ALL,91 ASSERT, ASSERT_NOT,92 AT,93 BRANCH,94 CALL,95 CATEGORY,96 CHARSET, BIGCHARSET,97 GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,98 IN, IN_IGNORE,99 INFO,100 JUMP,101 LITERAL, LITERAL_IGNORE,102 MARK,103 MAX_UNTIL,104 MIN_UNTIL,105 NOT_LITERAL, NOT_LITERAL_IGNORE,106 NEGATE,107 RANGE,108 REPEAT,109 REPEAT_ONE,110 SUBPATTERN,111 MIN_REPEAT_ONE112]113ATCODES = [114 AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,115 AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,116 AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,117 AT_UNI_NON_BOUNDARY118]119CHCODES = [120 CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,121 CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,122 CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,123 CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,124 CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,125 CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,126 CATEGORY_UNI_NOT_LINEBREAK127]128def makedict(list):129 d = {}130 i = 0131 for item in list:132 d[item] = i133 i = i + 1134 return d135OPCODES = makedict(OPCODES)136ATCODES = makedict(ATCODES)137CHCODES = makedict(CHCODES)138# replacement operations for "ignore case" mode139OP_IGNORE = {140 GROUPREF: 
GROUPREF_IGNORE,141 IN: IN_IGNORE,142 LITERAL: LITERAL_IGNORE,143 NOT_LITERAL: NOT_LITERAL_IGNORE144}145AT_MULTILINE = {146 AT_BEGINNING: AT_BEGINNING_LINE,147 AT_END: AT_END_LINE148}149AT_LOCALE = {150 AT_BOUNDARY: AT_LOC_BOUNDARY,151 AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY152}153AT_UNICODE = {154 AT_BOUNDARY: AT_UNI_BOUNDARY,155 AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY156}157CH_LOCALE = {158 CATEGORY_DIGIT: CATEGORY_DIGIT,159 CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,160 CATEGORY_SPACE: CATEGORY_SPACE,161 CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,162 CATEGORY_WORD: CATEGORY_LOC_WORD,163 CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,164 CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,165 CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK166}167CH_UNICODE = {168 CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,169 CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,170 CATEGORY_SPACE: CATEGORY_UNI_SPACE,171 CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,172 CATEGORY_WORD: CATEGORY_UNI_WORD,173 CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,174 CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,175 CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK176}177# flags178SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)179SRE_FLAG_IGNORECASE = 2 # case insensitive180SRE_FLAG_LOCALE = 4 # honour system locale181SRE_FLAG_MULTILINE = 8 # treat target as multiline string182SRE_FLAG_DOTALL = 16 # treat target as a single string183SRE_FLAG_UNICODE = 32 # use unicode locale184SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments185SRE_FLAG_DEBUG = 128 # debugging186# flags for INFO primitive187SRE_INFO_PREFIX = 1 # has prefix188SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)189SRE_INFO_CHARSET = 4 # pattern starts with character from given set190if __name__ == "__main__":191 def dump(f, d, prefix):192 items = d.items()193 items.sort(key=lambda a: a[1])194 for k, v in items:195 f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))196 f = open("sre_constants.h", "w")197 f.write("""\198/*199 * Secret Labs' Regular 
Expression Engine200 *201 * regular expression matching engine202 *203 * NOTE: This file is generated by sre_constants.py. If you need204 * to change anything in here, edit sre_constants.py and run it.205 *206 * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.207 *208 * See the _sre.c file for information on usage and redistribution.209 */210""")211 f.write("#define SRE_MAGIC %d\n" % MAGIC)212 dump(f, OPCODES, "SRE_OP")213 dump(f, ATCODES, "SRE")214 dump(f, CHCODES, "SRE")215 f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)216 f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)217 f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)218 f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)219 f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)220 f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)221 f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)222 f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)223 f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)224 f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)225 f.close()...

Full Screen

Full Screen

test_lexer.py

Source:test_lexer.py Github

copy

Full Screen

from pugsql import lexer, context
from unittest import TestCase

# Shared parsing context for literal (non-file) SQL sources.
ctx = context.Context('<literal>')


def at(line, col):
    """Return a Context pointing at *line*/*col* in the shared sqlfile."""
    return context.Context(ctx.sqlfile, line, col)


class LexTest(TestCase):
    """lexer.lex: splitting SQL text into comment ('C') / query ('Q') tokens."""

    def test_basic(self):
        l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 1))
        l2 = lexer.Token(
            'Q', 'select username from users where user_id = :user_id',
            at(2, 1))
        self.assertEqual(
            [l1, l2],
            lexer.lex(open('tests/sql/basic.sql', 'r').read(), ctx))

    def test_leading_comment_whitespace(self):
        # NOTE(review): runs of spaces in the input literals below were lost
        # in the source extraction; counts are reconstructed from the
        # asserted column positions (col 4 => 3 leading spaces). Verify
        # against the upstream pugsql test file.
        l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 4))
        l2 = lexer.Token(
            'Q', 'select username from users where user_id = :user_id',
            at(2, 1))
        self.assertEqual(
            [l1, l2],
            lexer.lex(
                '   -- :name username_for_id :1\n'
                'select username from users where user_id = :user_id', ctx))

    def test_whitespace(self):
        l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 2))
        l2 = lexer.Token(
            'Q', 'select username from users where user_id = :user_id',
            at(2, 2))
        self.assertEqual(
            [l1, l2],
            lexer.lex(
                ' -- :name username_for_id :1 \n'
                ' select username from users where user_id = :user_id ', ctx))

    def test_blank_lines(self):
        # A blank line still yields an (empty) 'Q' token.
        l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 1))
        l2 = lexer.Token('Q', '', at(2, 1))
        l3 = lexer.Token(
            'Q', 'select username from users where user_id = :user_id',
            at(3, 1))
        self.assertEqual(
            [l1, l2, l3],
            lexer.lex(
                '-- :name username_for_id :1 \n'
                '\n'
                'select username from users where user_id = :user_id ', ctx))


class LexCommentTest(TestCase):
    """lexer.lex_comment: extracting a leading :keyword from '--' comments."""

    def tok(self, comment):
        # Wrap raw comment text in a 'C' token anchored at (1, 1).
        return lexer.Token('C', comment, at(1, 1))

    def test_no_keywords(self):
        self.assertIsNone(lexer.lex_comment(self.tok('-- foobar baz')))

    def test_not_a_comment(self):
        self.assertIsNone(lexer.lex_comment(self.tok('select 1')))

    def test_internal_keyword(self):
        # A keyword that is not in leading position is not recognized.
        self.assertIsNone(lexer.lex_comment(self.tok('-- stuff :foo bar')))

    def test_works(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 4)),
            'rest': lexer.Token('S', 'bar baz', at(1, 9)),
        }, lexer.lex_comment(self.tok('-- :foo bar baz')))

    def test_multiple_keywords(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 4)),
            'rest': lexer.Token('S', 'bar :baz', at(1, 9)),
        }, lexer.lex_comment(self.tok('-- :foo bar :baz')))

    def test_leading_whitespace(self):
        # keyword at col 9 implies 5 leading spaces (reconstructed).
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 9)),
            'rest': lexer.Token('S', 'bar :baz', at(1, 14)),
        }, lexer.lex_comment(self.tok('     -- :foo bar :baz')))

    def test_internal_whitespace(self):
        # Columns imply 2 spaces after '--' and 3 spaces before 'bar'.
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 5)),
            'rest': lexer.Token('S', 'bar :baz', at(1, 12)),
        }, lexer.lex_comment(self.tok('--  :foo   bar :baz')))

    def test_keyword_only(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 4)),
            'rest': lexer.Token('S', '', at(1, 8)),
        }, lexer.lex_comment(self.tok('-- :foo')))

    def test_no_space(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':foo', at(1, 3)),
            'rest': lexer.Token('S', '', at(1, 7)),
        }, lexer.lex_comment(self.tok('--:foo')))


class LexNameTest(TestCase):
    """lexer.lex_name: parsing 'name [:keyword] [rest]' comment payloads."""

    def tok(self, rest):
        return lexer.Token('S', rest, at(1, 1))

    def test_name_only(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', None, at(1, 4)),
            'rest': lexer.Token('S', None, at(1, 4)),
        }, lexer.lex_name(self.tok('foo')))

    def test_name_rest_no_keyword(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', None, at(1, 5)),
            'rest': lexer.Token('S', 'other stuff', at(1, 5)),
        }, lexer.lex_name(self.tok('foo other stuff')))

    def test_with_keyword(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', ':bar', at(1, 5)),
            'rest': lexer.Token('S', None, at(1, 9)),
        }, lexer.lex_name(self.tok('foo :bar')))

    def test_with_rest(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', ':bar', at(1, 5)),
            'rest': lexer.Token('S', 'other stuff', at(1, 10)),
        }, lexer.lex_name(self.tok('foo :bar other stuff')))

    def test_leading_whitespace(self):
        # name at col 4 implies 3 leading spaces (reconstructed).
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 4)),
            'keyword': lexer.Token('K', ':bar', at(1, 8)),
            'rest': lexer.Token('S', 'other stuff', at(1, 13)),
        }, lexer.lex_name(self.tok('   foo :bar other stuff')))

    def test_trailing_whitespace(self):
        # Trailing-space count is not recoverable from the positions;
        # a single trailing space preserves the test's intent.
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 4)),
            'keyword': lexer.Token('K', ':bar', at(1, 8)),
            'rest': lexer.Token('S', 'other stuff', at(1, 13)),
        }, lexer.lex_name(self.tok('   foo :bar other stuff ')))

    def test_name_only_trailing_whitespace(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', None, at(1, 4)),
            'rest': lexer.Token('S', None, at(1, 4)),
        }, lexer.lex_name(self.tok('foo ')))

    def test_with_keyword_trailing_whitespace(self):
        self.assertEqual({
            'name': lexer.Token('N', 'foo', at(1, 1)),
            'keyword': lexer.Token('K', ':bar', at(1, 5)),
            'rest': lexer.Token('S', None, at(1, 9)),
        }, lexer.lex_name(self.tok('foo :bar ')))

    def test_no_name(self):
        self.assertIsNone(lexer.lex_name(self.tok(' ')))

    def test_empty(self):
        self.assertIsNone(lexer.lex_name(self.tok('')))


class LexResultTest(TestCase):
    """lexer.lex_result: parsing ':keyword [rest]' result-type payloads."""

    def tok(self, s):
        return lexer.Token('S', s, at(1, 1))

    def test_works(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':raw', at(1, 1)),
            'rest': lexer.Token('S', None, at(1, 5)),
        }, lexer.lex_result(self.tok(':raw')))

    def test_rest(self):
        self.assertEqual({
            'keyword': lexer.Token('K', ':raw', at(1, 1)),
            'rest': lexer.Token('S', ' stuff', at(1, 5)),
        }, lexer.lex_result(self.tok(':raw stuff')))

    # NOTE(review): the provided source is truncated in the middle of
    # test_no_keyword; its body is not visible and is omitted here rather
    # than guessed — restore it from the upstream pugsql repository.

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run Sure automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful