How to use the Lex method of the parser package

Best Syzkaller code snippet using parser.Lex

ast.go

Source: ast.go (GitHub)

package expr

import (
    "bytes"
    "fmt"
    "strconv"
    "strings"
    "text/scanner"
    "unicode"
)

const (
    TokenEOF = -(iota + 1)
    TokenIdent
    TokenInt
    TokenFloat
    TokenOperator
)

type lexer struct {
    scan  scanner.Scanner
    token rune
    text  string
}

func (lex *lexer) getToken() rune {
    return lex.token
}

func (lex *lexer) getText() string {
    return lex.text
}

func (lex *lexer) next() {
    token := lex.scan.Scan()
    text := lex.scan.TokenText()
    switch token {
    case scanner.EOF:
        lex.token = TokenEOF
        lex.text = text
    case scanner.Ident:
        lex.token = TokenIdent
        lex.text = text
    case scanner.Int:
        lex.token = TokenInt
        lex.text = text
    case scanner.Float:
        lex.token = TokenFloat
        lex.text = text
    case '+', '-', '*', '/', '%', '~':
        lex.token = TokenOperator
        lex.text = text
    case '&', '|', '=':
        var buffer bytes.Buffer
        lex.token = TokenOperator
        buffer.WriteRune(token)
        next := lex.scan.Peek()
        if next == token {
            buffer.WriteRune(next)
            lex.scan.Scan()
        }
        lex.text = buffer.String()
    case '>', '<', '!':
        var buffer bytes.Buffer
        lex.token = TokenOperator
        buffer.WriteRune(token)
        next := lex.scan.Peek()
        if next == '=' {
            buffer.WriteRune(next)
            lex.scan.Scan()
        }
        lex.text = buffer.String()
    default:
        if token >= 0 {
            lex.token = token
            lex.text = text
        } else {
            msg := fmt.Sprintf("got unknown token:%q, text:%s", lex.token, lex.text)
            panic(lexPanic(msg))
        }
    }
    //fmt.Printf("token:%d, text:%s\n", lex.token, lex.text)
}

type lexPanic string

// describe returns a string describing the current token, for use in errors.
func (lex *lexer) describe() string {
    switch lex.token {
    case TokenEOF:
        return "end of file"
    case TokenIdent:
        return fmt.Sprintf("identifier %s", lex.getText())
    case TokenInt, TokenFloat:
        return fmt.Sprintf("number %s", lex.getText())
    }
    return fmt.Sprintf("%q", rune(lex.getToken())) // any other rune
}

func precedence(token rune, text string) int {
    if token == TokenOperator {
        switch text {
        case "~", "!":
            return 9
        case "*", "/", "%":
            return 8
        case "+", "-":
            return 7
        case ">", ">=", "<", "<=":
            return 6
        case "!=", "==", "=":
            return 5
        case "&":
            return 4
        case "|":
            return 3
        case "&&":
            return 2
        case "||":
            return 1
        default:
            msg := fmt.Sprintf("unknown operator:%s", text)
            panic(lexPanic(msg))
        }
    }
    return 0
}

// ---- parser ----

type ExpressionParser struct {
    expression Expr
    variable   map[string]struct{}
}

func NewExpressionParser() *ExpressionParser {
    return &ExpressionParser{
        expression: nil,
        variable:   make(map[string]struct{}),
    }
}

// Parse parses the input string as an arithmetic expression.
//
//  expr = num                          a literal number, e.g., 3.14159
//       | id                           a variable name, e.g., x
//       | id '(' expr ',' ... ')'      a function call
//       | '-' expr                     a unary operator ( + - ! )
//       | expr '+' expr                a binary operator ( + - * / && & || | == )
//
func (parser *ExpressionParser) Parse(input string) (err error) {
    defer func() {
        switch x := recover().(type) {
        case nil:
            // no panic
        case lexPanic:
            err = fmt.Errorf("%s", x)
        default:
            // unexpected panic: resume state of panic.
            panic(x)
        }
    }()
    lex := new(lexer)
    lex.scan.Init(strings.NewReader(input))
    lex.scan.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats
    lex.scan.IsIdentRune = parser.isIdentRune
    lex.next() // initial lookahead
    parser.expression = nil
    parser.variable = make(map[string]struct{})
    e := parser.parseExpr(lex)
    if lex.token != scanner.EOF {
        return fmt.Errorf("unexpected %s", lex.describe())
    }
    parser.expression = e
    return nil
}

func (parser *ExpressionParser) GetExpr() Expr {
    return parser.expression
}

func (parser *ExpressionParser) GetVariable() []string {
    variable := make([]string, 0, len(parser.variable))
    for v := range parser.variable {
        if v != "true" && v != "false" {
            variable = append(variable, v)
        }
    }
    return variable
}

func (parser *ExpressionParser) isIdentRune(ch rune, i int) bool {
    return ch == '$' || ch == '_' || unicode.IsLetter(ch) || unicode.IsDigit(ch) && i > 0
}

func (parser *ExpressionParser) parseExpr(lex *lexer) Expr {
    return parser.parseBinary(lex, 1)
}

// binary = unary ('+' binary)*
// parseBinary stops when it encounters an
// operator of lower precedence than prec1.
func (parser *ExpressionParser) parseBinary(lex *lexer, prec1 int) Expr {
    lhs := parser.parseUnary(lex)
    for prec := precedence(lex.getToken(), lex.getText()); prec >= prec1; prec-- {
        for precedence(lex.getToken(), lex.getText()) == prec {
            op := lex.getText()
            lex.next() // consume operator
            rhs := parser.parseBinary(lex, prec+1)
            lhs = binary{op, lhs, rhs}
        }
    }
    return lhs
}

// unary = '+' expr | primary
func (parser *ExpressionParser) parseUnary(lex *lexer) Expr {
    if lex.getToken() == TokenOperator {
        op := lex.getText()
        if op == "+" || op == "-" || op == "~" || op == "!" {
            lex.next()
            return unary{op, parser.parseUnary(lex)}
        } else {
            msg := fmt.Sprintf("unary got unknown operator:%s", lex.getText())
            panic(lexPanic(msg))
        }
    }
    return parser.parsePrimary(lex)
}

// primary = id
//         | id '(' expr ',' ... ',' expr ')'
//         | num
//         | '(' expr ')'
func (parser *ExpressionParser) parsePrimary(lex *lexer) Expr {
    switch lex.token {
    case TokenIdent:
        id := lex.getText()
        lex.next()
        if lex.token != '(' {
            parser.variable[id] = struct{}{}
            return Var(id)
        }
        lex.next() // consume '('
        var args []Expr
        if lex.token != ')' {
            for {
                args = append(args, parser.parseExpr(lex))
                if lex.token != ',' {
                    break
                }
                lex.next() // consume ','
            }
            if lex.token != ')' {
                msg := fmt.Sprintf("got %q, want ')'", lex.token)
                panic(lexPanic(msg))
            }
        }
        lex.next() // consume ')'
        return call{id, args}
    case TokenFloat:
        f, err := strconv.ParseFloat(lex.getText(), 64)
        if err != nil {
            panic(lexPanic(err.Error()))
        }
        lex.next() // consume number
        return literal{value: f}
    case TokenInt:
        i, err := strconv.ParseInt(lex.getText(), 10, 64)
        if err != nil {
            panic(lexPanic(err.Error()))
        }
        lex.next() // consume number
        return literal{value: i}
    case '(':
        lex.next() // consume '('
        e := parser.parseExpr(lex)
        if lex.token != ')' {
            msg := fmt.Sprintf("got %s, want ')'", lex.describe())
            panic(lexPanic(msg))
        }
        lex.next() // consume ')'
        return e
    }
    msg := fmt.Sprintf("unexpected %s", lex.describe())
    panic(lexPanic(msg))
}
...
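
The listing pairs a small hand-written lexer with a recursive-descent parser: Parse initializes the scanner, primes one token of lookahead with next(), and parseBinary/parseUnary/parsePrimary then walk the grammar using getToken/getText and the precedence table. Below is a minimal usage sketch, assuming the code lives in an importable package named expr (the import path shown is hypothetical) and that the Expr, Var, call, literal, unary and binary types it references are defined elsewhere in that package.

package main

import (
    "fmt"

    "example.com/expr" // hypothetical import path for the expr package above
)

func main() {
    p := expr.NewExpressionParser()
    if err := p.Parse("a*b + sqrt(c) >= 2"); err != nil {
        fmt.Println("parse error:", err)
        return
    }
    // Variables referenced by the expression, e.g. a, b, c (order not guaranteed).
    fmt.Println(p.GetVariable())
}

GetExpr then returns the parsed Expr tree for later evaluation.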

Lex

Using AI Code Generation

import (
    "fmt"
    "os"
    // the lex package import path is not shown in the original snippet
)

func main() {
    if len(os.Args) != 2 {
        fmt.Println("Usage: go run 2.go <filename>")
        os.Exit(1)
    }
    filename := os.Args[1]
    p := lex.NewParser(filename)
    p.Lex()
}

Lex

Using AI Code Generation

import (
    "log"
    // stack and tui package import paths are not shown in the original snippet
)

var (
    terminals    = []string{"+", "-", "*", "/", "(", ")", "^", "sin", "cos", "tan", "sqrt", "log", "ln", "e", "pi", "x"}
    nonTerminals = []string{"S", "E", "F", "T", "G", "H", "A", "B", "C", "D", "R", "P", "Q", "U", "V", "W", "X", "Y", "Z"}
)

func main() {
    parser := Parser{}
    parser.Lex()
}

type Parser struct {
    // field declarations are not shown in the original snippet; the methods
    // below expect these fields, and the types assume a stack package whose
    // New() returns *stack.Stack
    stack       *stack.Stack
    resultStack *stack.Stack
    tokenStack  *stack.Stack
}

func (p *Parser) Lex() {
    p.stack = stack.New()
    p.resultStack = stack.New()
    p.tokenStack = stack.New()
    p.Read()
}

func (p *Parser) Read() {
    ui, err := tui.New(tui.NewHBox(
        tui.NewLabel("Enter the expression: "),
        tui.NewEntry(),
    ))
    if err != nil {
        log.Fatal(err)
    }
    entry := ui.Find("entry").(*tui.Entry)
    entry.OnSubmit(func(e *tui.Entry) {
        s := e.Text()
        p.Parse(s)
    })
    ui.SetKeybinding("Esc", func() { ui.Quit() })
    if err := ui.Run(); err != nil {
        log.Fatal(err)
    }
}

func (p *Parser) Parse

Lex

Using AI Code Generation

import (
    "fmt"
    "os"
    // the parser package import path is not shown in the original snippet
)

func main() {
    if len(os.Args) != 2 {
        fmt.Println("Usage: ./2 <input string>")
        os.Exit(1)
    }
    p := parser.NewParser(os.Args[1])
    for {
        tok := p.Lex()
        fmt.Println(tok)
        if tok == parser.EOF {
            break // stop once the lexer reports end of input
        }
    }
}
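
The same loop-until-EOF pattern applies to the lexer from ast.go above. Here is a minimal sketch under two assumptions: it is placed inside the expr package (the lexer type and its next/getToken/getText methods are unexported), and debugTokens is a hypothetical helper that is not part of the original listing.

// debugTokens scans input and prints every token the lexer produces
// until it reaches TokenEOF.
func debugTokens(input string) {
    lex := new(lexer)
    lex.scan.Init(strings.NewReader(input))
    lex.scan.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats
    lex.next() // initial lookahead
    for lex.getToken() != TokenEOF {
        fmt.Printf("token=%d text=%q\n", lex.getToken(), lex.getText())
        lex.next()
    }
}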

Lex

Using AI Code Generation

import (
var (
const (
const (
var toknames = []string{
}
var opnames = []string{
}
var errors = []string{
}
const (

Lex

Using AI Code Generation

import (
    "bufio"
    "fmt"
    "os"
    // the parser package import path is not shown in the original snippet
)

func main() {
    fmt.Println("Enter the input string")
    reader := bufio.NewReader(os.Stdin)
    input, _ := reader.ReadString('\n')
    parser.Lex(input)
}

