How to use the ErrorHandler type of the ast package

Best Syzkaller code snippet using ast.ErrorHandler

parser.go

Source:parser.go Github

copy

Full Screen

1package parser2import (3 "fmt"4 "io/ioutil"5 "os"6 "path/filepath"7 "github.com/cozees/cook/pkg/cook/ast"8 "github.com/cozees/cook/pkg/cook/token"9 cookErrors "github.com/cozees/cook/pkg/errors"10)11type Parser interface {12 Parse(file string) (ast.Cook, error)13 ParseSrc(file *token.File, src []byte) (ast.Cook, error)14}15func NewParser() Parser {16 return &parser{parsed: make(map[string]*token.File), pending: make(map[string]*token.File)}17}18type parser struct {19 pending map[string]*token.File // Cookfile which is waiting for parsing20 parsed map[string]*token.File // Cookfile which already parsed21 s *scanner22 tfile *token.File23 block *ast.BlockStatement24 cook ast.Cook25 // current token26 cOffs int27 cTok token.Token28 cLit string29 // ahead token by 1 step30 nOffs int31 nTok token.Token32 nLit string33 errs *cookErrors.CookError34}35func (p *parser) curPos() token.Position { return p.tfile.Position(p.cOffs) }36func (p *parser) errorHandler(pos token.Position, msg string, args ...interface{}) {37 if p.errs == nil {38 p.errs = &cookErrors.CookError{}39 }40 p.errs.StackError(fmt.Errorf(pos.String()+" "+msg, args...))41 // when encounter error immedate ignore everything until new statement42 for {43 p.next()44 switch p.cTok {45 case token.IDENT:46 if (token.ADD_ASSIGN <= p.nTok && p.nTok <= token.REM_ASSIGN) ||47 (token.AND_ASSIGN <= p.nTok && p.nTok <= token.ASSIGN) || p.nTok == token.LBRACK {48 // p.nTok == token.LBRACK, is false positive as it not guarantee to be the49 // index assigned statement.50 return51 }52 case token.FOR, token.IF, token.BREAK, token.CONTINUE, token.RETURN, token.EOF, token.COMMENT:53 return54 }55 }56}57func (p *parser) expect(require token.Token) (offs int) {58 if p.cTok != require {59 p.errorHandler(p.curPos(), fmt.Sprintf("expect %s but got %s", require, p.cTok))60 offs = -161 } else {62 offs = p.cOffs63 p.next()64 }65 return66}67func (p *parser) init(file *token.File, src []byte) (err error) {68 p.tfile = file69 if p.s, err = 
NewScannerSrc(file, src, p.errorHandler); err == nil {70 p.s.skipLineFeed = true71 p.cOffs, p.cTok, p.cLit = -1, 0, ""72 p.nOffs, p.nTok, p.nLit = p.s.Scan()73 }74 return err75}76func (p *parser) next() {77 p.cOffs, p.cTok, p.cLit = p.nOffs, p.nTok, p.nLit78 if p.nTok != token.EOF {79 p.nOffs, p.nTok, p.nLit = p.s.Scan()80 }81}82func (p *parser) Parse(file string) (ast.Cook, error) {83 stat, err := os.Stat(file)84 if err != nil {85 return nil, err86 }87 tfile := token.NewFile(file, int(stat.Size()))88 src, err := ioutil.ReadFile(file)89 if err != nil {90 return nil, err91 }92 return p.ParseSrc(tfile, src)93}94func (p *parser) ParseSrc(file *token.File, src []byte) (ast.Cook, error) {95 if err := p.init(file, src); err == nil {96 p.cook = ast.NewCook()97 p.block = p.cook.Block()98 return p.parse()99 } else {100 return nil, err101 }102}103func (p *parser) parse() (cook ast.Cook, err error) {104 // scan include directive first105nextFile:106 for p.next(); p.cTok != token.EOF; {107 if p.cTok == token.INCLUDE {108 p.parseIncludeDirective()109 continue110 }111 break112 }113 for p.cTok != token.EOF {114 switch p.cTok {115 case token.INCLUDE:116 p.errorHandler(p.curPos(), "include directive must place at the very top of the file.")117 case token.IDENT:118 p.parseIdentifier(true)119 case token.FOR:120 p.parseForLoop(false)121 case token.IF:122 p.parseIf(false, nil)123 case token.AT, token.HASH:124 p.parseCallReference(false, nil)125 case token.EXIT:126 offs := p.cOffs127 if code := p.parseBinaryExpr(false, token.LowestPrec+1); code != nil {128 p.block.Append(&ast.ExprWrapperStatement{129 X: &ast.Exit{Base: &ast.Base{Offset: offs, File: p.tfile}, ExitCode: code},130 })131 }132 p.expect(token.LF)133 case token.COMMENT:134 p.next()135 // eat the comment for now.136 // TODO: add comment to file which help when formatting the code137 default:138 p.errorHandler(p.curPos(), "invalid token %s", p.cTok)139 }140 }141 // check if there more file pending to parse142 if len(p.pending) > 
0 {143 p.parsed[p.tfile.Name()] = p.tfile144 for k, v := range p.pending {145 if src, err := ioutil.ReadFile(v.Name()); err != nil {146 return nil, err147 } else if err := p.init(v, src); err != nil {148 return nil, err149 }150 p.tfile = v151 delete(p.pending, k)152 break153 }154 p.block = p.cook.Block()155 goto nextFile156 }157 if p.errs != nil {158 // return error stack159 return nil, p.errs160 } else {161 return p.cook, nil162 }163}164func (p *parser) parseIncludeDirective() {165 p.next()166 if p.cTok == token.STRING {167 _, ok1 := p.parsed[p.cLit]168 _, ok2 := p.pending[p.cLit]169 if ok1 || ok2 {170 // file have already be parsed, nothing to do here.171 return172 }173 file := p.cLit174 p.next()175 if p.expect(token.LF) == -1 {176 return177 }178 // new file179 ifile := filepath.Join(filepath.Dir(p.s.file.Name()), file)180 if stat, err := os.Stat(ifile); err != nil {181 if os.IsNotExist(err) {182 p.errorHandler(p.curPos(), "included file %s not found", ifile)183 } else {184 p.errorHandler(p.curPos(), "unable to read included file %s ", ifile)185 }186 } else {187 p.pending[p.cLit] = token.NewFile(ifile, int(stat.Size()))188 }189 } else {190 p.errorHandler(p.curPos(), "include directive expected string")191 }192}193func (p *parser) parseIdentifier(head bool) {194 switch p.nTok {195 case token.COLON:196 p.parseTarget()197 case token.LPAREN:198 // function declaration or calling a function199 p.parseDeclareFunction(false)200 case token.LBRACK:201 // index expression202 if x := p.parseIndexExpression(); x != nil {203 p.parseAssignStatement(x)204 }205 case token.LBRACE:206 // slice or delete207 if head {208 p.next()209 p.errorHandler(p.curPos(), "unexpected %s", p.cTok)210 return211 }212 case token.INC, token.DEC:213 offs, lit := p.cOffs, p.cLit214 p.next()215 p.block.Append(&ast.ExprWrapperStatement{216 X: &ast.IncDec{217 Op: p.cTok,218 X: &ast.Ident{Base: &ast.Base{Offset: offs, File: p.tfile}, Name: lit},219 },220 })221 p.next()222 p.expect(token.LF)223 default:224 
settable := &ast.Ident{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Name: p.cLit}225 p.parseAssignStatement(settable)226 if p.cTok == token.LF {227 p.next()228 }229 }230}231func (p *parser) parseTarget() {232 offs, name := p.cOffs, p.cLit233 p.next()234 if t, err := p.cook.AddTarget(&ast.Base{File: p.tfile, Offset: offs}, name); err != nil {235 p.errorHandler(p.curPos(), err.Error())236 } else if p.next(); name == "all" && p.cTok == token.MUL {237 t.SetCallAll()238 p.next()239 } else {240 p.block = t.Insts241 }242}243func (p *parser) parseAssignStatement(settableNode ast.SettableNode) {244 offs := p.cOffs245 p.next()246 op := p.cTok247 switch {248 case p.cTok == token.ASSIGN:249 fallthrough250 case token.ADD_ASSIGN <= p.cTok && p.cTok <= token.REM_ASSIGN:251 fallthrough252 case token.AND_ASSIGN <= p.cTok && p.cTok <= token.AND_NOT_ASSIGN:253 assignStmt := &ast.AssignStatement{254 Base: &ast.Base{Offset: offs, File: p.tfile},255 Op: op,256 Ident: settableNode,257 }258 if p.nTok == token.AT || p.nTok == token.HASH {259 p.next()260 assignStmt.Value = p.parseCallReference(true, nil)261 } else {262 assignStmt.Value = p.parseBinaryExpr(false, token.LowestPrec+1)263 }264 if p.cTok == token.LF {265 // newline is option on assign statement266 p.next()267 }268 p.block.Append(assignStmt)269 default:270 p.errorHandler(p.curPos(), "unexpected %s", p.cTok)271 }272}273func (p *parser) parseBinaryExpr(isChaining bool, priority int) ast.Node {274 p.next()275 if p.cTok == token.IDENT {276 if p.nTok == token.LPAREN {277 return p.parseTransformation()278 } else if p.nTok == token.EXISTS {279 offs, lit := p.cOffs, p.cLit280 p.next()281 p.next()282 base := &ast.Base{Offset: offs, File: p.tfile}283 return &ast.Exists{Base: base, X: &ast.Ident{Base: base, Name: lit}}284 }285 }286 x := p.parseUnaryExpr()287 if isChaining && p.cTok.IsComparison() {288 return x289 } else if p.cTok == token.EXISTS {290 }291 var prevNode ast.Node292 for {293 op, oprec := p.cTok, p.cTok.Precedence()294 if 
oprec < priority || isChaining && op.IsComparison() {295 return x296 }297 // check for chaining comparision298 // <, ≤ (<=), >, ≥ (>=), ≠ (!=), ==, is299 // special case for is, ternary and fallback expression300 if op == token.QES {301 // ternary case or short if302 x = p.parseTernaryExpr(x)303 } else if op == token.DQS {304 // fallback expression305 x = p.parseFallbackExpr(x)306 } else if op == token.IS {307 x = p.parseIsExpr(x)308 } else {309 offs := p.cOffs310 isComp := op.IsComparison()311 y := p.parseBinaryExpr(isChaining || isComp, oprec+1)312 ly := y313 if prevNode != nil {314 y = &ast.Binary{315 Base: &ast.Base{Offset: offs, File: p.tfile},316 Op: op,317 L: prevNode,318 R: y,319 }320 op = token.LAND321 prevNode = nil322 }323 x = &ast.Binary{324 Base: &ast.Base{Offset: offs, File: p.tfile},325 Op: op,326 L: x,327 R: y,328 }329 if isComp && p.cTok.IsComparison() {330 prevNode = ly331 }332 }333 }334}335func (p *parser) parseUnaryExpr() (x ast.Node) {336 switch p.cTok {337 case token.ADD, token.SUB, token.NOT, token.XOR, token.FD:338 offs, op := p.cOffs, p.cTok339 p.next()340 opr, _ := p.parseOperand()341 if p.cTok == token.EXISTS && op == token.FD {342 p.next()343 x = &ast.Exists{344 Base: &ast.Base{Offset: offs, File: p.tfile},345 Op: op,346 X: opr,347 }348 } else {349 x = &ast.Unary{350 Base: &ast.Base{Offset: offs, File: p.tfile},351 Op: op,352 X: opr,353 }354 }355 case token.SIZEOF:356 offs := p.cOffs357 p.next()358 var opr ast.Node359 if p.cTok == token.FD {360 opr = p.parseUnaryExpr()361 } else {362 opr, _ = p.parseOperand()363 }364 x = &ast.SizeOf{365 Base: &ast.Base{Offset: offs, File: p.tfile},366 X: opr,367 }368 case token.VAR:369 offs := p.cOffs370 p.next()371 lit := p.cLit372 if p.expect(token.INTEGER) != -1 {373 x = &ast.Ident{Base: &ast.Base{Offset: offs, File: p.tfile}, Name: lit}374 }375 case token.TINTEGER, token.TFLOAT, token.TSTRING, token.TBOOLEAN:376 x = p.parseTypeCaseExpr()377 case token.ON:378 offs := p.cOffs379 p.next()380 switch 
p.cTok {381 case token.LINUX, token.MACOS, token.WINDOWS:382 x = &ast.OSysCheck{Base: &ast.Base{Offset: offs, File: p.tfile}, OS: p.cTok}383 p.next()384 default:385 p.errorHandler(p.curPos(), "expect operating system keyword got %s (%s)", p.cTok, p.cLit)386 return nil387 }388 case token.HASH, token.AT:389 op := p.cTok390 base := &ast.Base{Offset: p.cOffs, File: p.tfile}391 if p.next(); p.cTok == token.IDENT {392 soffs, tok, lit := p.cOffs, p.cTok, p.cLit393 if p.next(); p.cTok != token.EXISTS {394 p.errorHandler(p.curPos(), "expect %s but got %s", token.EXISTS, p.cTok)395 return nil396 }397 if tok == token.STRING {398 x = &ast.BasicLit{Base: base, Lit: lit, Kind: tok, Mark: p.s.src[soffs-1]}399 } else {400 x = &ast.Ident{Base: base, Name: lit}401 }402 } else {403 p.errorHandler(p.curPos(), "expect identifier or file expression but got %s", p.cTok)404 return nil405 }406 x = &ast.Exists{Base: base, Op: op, X: x}407 p.next()408 default:409 offs := p.cOffs410 x, _ = p.parseOperand()411 if p.cTok == token.EXISTS {412 p.next()413 x = &ast.Exists{Base: &ast.Base{Offset: offs, File: p.tfile}, X: x}414 }415 }416 return417}418func (p *parser) parseOperand() (x ast.Node, kind token.Token) {419 switch p.cTok {420 case token.IDENT:421 if p.nTok == token.LBRACK {422 x = p.parseIndexExpression()423 } else {424 offs := p.cOffs425 x = &ast.Ident{Base: &ast.Base{Offset: offs, File: p.tfile}, Name: p.cLit}426 }427 p.next()428 kind = token.IDENT429 case token.INTEGER, token.FLOAT, token.STRING, token.BOOLEAN:430 if p.cTok == token.STRING {431 x = &ast.BasicLit{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Lit: p.cLit, Kind: p.cTok, Mark: p.s.src[p.cOffs-1]}432 } else {433 x = &ast.BasicLit{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Lit: p.cLit, Kind: p.cTok}434 }435 kind = p.cTok436 p.next()437 case token.STRING_ITP:438 x, kind = p.parseStringInterpolation(), token.STRING439 case token.LPAREN:440 lparen := p.cOffs441 inx := p.parseBinaryExpr(false, token.LowestPrec+1) // 
types may be parenthesized: (some type)442 if p.expect(token.RPAREN) == -1 {443 return nil, 0444 }445 kind = token.LPAREN446 x = &ast.Paren{Base: &ast.Base{Offset: lparen, File: p.tfile}, Inner: inx}447 case token.LBRACE:448 x, kind = p.parseMapLiteral(), token.MAP449 case token.LBRACK:450 x, kind = p.parserArrayLiteral(), token.ARRAY451 default:452 p.errorHandler(p.curPos(), fmt.Sprintf("invalid token %s", p.cTok))453 }454 return455}456func (p *parser) parseStringInterpolation() ast.Node {457 offs := p.cOffs458 sib := ast.NewStringInterpolationBuilder(p.s.src[offs-1])459 for {460 switch {461 case p.cTok == token.STRING_ITP:462 sib.WriteString(p.cLit)463 p.next()464 if p.cTok != token.VAR {465 return sib.Build(offs, p.tfile)466 }467 p.next()468 case p.cTok == token.VAR:469 p.next()470 default:471 return sib.Build(offs, p.tfile)472 }473 // require following to a variable or an expression474 switch p.cTok {475 case token.IDENT:476 sib.AddExpression(&ast.Ident{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Name: p.cLit})477 p.next()478 case token.LBRACE:479 x := p.parseBinaryExpr(false, token.LowestPrec+1)480 if p.expect(token.RBRACE) == -1 {481 return nil482 }483 sib.AddExpression(x)484 }485 }486}487func (p *parser) parseMapLiteral() ast.Node {488 offs := p.s.offset489 p.next()490 var keys []ast.Node491 var values []ast.Node492 keys = make([]ast.Node, 0)493 values = make([]ast.Node, 0)494 if p.cTok != token.RBRACE {495 loop:496 for {497 k, _ := p.parseOperand()498 keys = append(keys, k)499 if p.expect(token.COLON) != -1 {500 v, _ := p.parseOperand()501 values = append(values, v)502 } else {503 return nil504 }505 switch p.cTok {506 case token.RBRACE, token.EOF:507 break loop508 case token.COMMA:509 p.next()510 if p.cTok == token.RBRACE {511 break loop512 }513 }514 }515 }516 if p.expect(token.RBRACE) != -1 {517 return &ast.MapLiteral{Base: &ast.Base{Offset: offs, File: p.tfile}, Keys: keys, Values: values}518 } else {519 return nil520 }521}522func (p *parser) 
parserArrayLiteral() ast.Node {523 offs := p.s.offset524 p.next()525 var values []ast.Node526 if p.cTok != token.RBRACK {527 x, tok := p.parseOperand()528 if isGlob, nodes := parseArrayFile(x, tok); isGlob {529 values = append(values, nodes...)530 } else {531 values = append(values, x)532 }533 } else {534 values = make([]ast.Node, 0)535 }536loop:537 for {538 switch p.cTok {539 case token.RBRACK, token.EOF:540 break loop541 case token.COMMA:542 p.next()543 if p.cTok == token.RBRACK {544 break loop545 }546 y, tok := p.parseOperand()547 if isGlob, nodes := parseArrayFile(y, tok); isGlob {548 values = append(values, nodes...)549 } else {550 values = append(values, y)551 }552 }553 }554 if p.expect(token.RBRACK) != -1 {555 return &ast.ArrayLiteral{Base: &ast.Base{Offset: offs, File: p.tfile}, Values: values}556 } else {557 return nil558 }559}560func (p *parser) parseForLoop(inForLoop bool) {561 offs := p.cOffs562 p.next()563 label := ""564 if p.cTok == token.COLON {565 if p.next(); p.cTok != token.IDENT {566 p.errorHandler(p.curPos(), "expect for loop label but got %s", p.cTok)567 return568 }569 label = p.cLit570 p.next()571 }572 ioffs, ilit := p.cOffs, p.cLit573 var (574 i, value *ast.Ident575 oprd ast.Node576 lrange *ast.Interval577 blcOffs int578 )579 if p.cTok == token.IDENT {580 p.next()581 if p.cTok == token.COMMA {582 // for in array, map or string583 p.next()584 voffs, vlit := p.cOffs, p.cLit585 if p.expect(token.IDENT) == -1 {586 return587 }588 if p.expect(token.IN) == -1 {589 return590 }591 var tok token.Token592 oprd, tok = p.parseOperand()593 switch tok {594 case token.INTEGER, token.FLOAT, token.BOOLEAN:595 p.errorHandler(oprd.Position(), "for loop can iterate through %s", tok)596 return597 }598 blcOffs = p.cOffs599 if p.expect(token.LBRACE) == -1 {600 return601 }602 i = &ast.Ident{Base: &ast.Base{Offset: ioffs, File: p.tfile}, Name: ilit}603 value = &ast.Ident{Base: &ast.Base{Offset: voffs, File: p.tfile}, Name: vlit}604 } else {605 // a range loop606 if 
p.expect(token.IN) == -1 {607 return608 }609 lrange = p.parseInterval()610 blcOffs = p.cOffs611 if p.expect(token.LBRACE) == -1 {612 return613 }614 i = &ast.Ident{Base: &ast.Base{Offset: ioffs, File: p.tfile}, Name: ilit}615 }616 } else if p.expect(token.LBRACE) == -1 {617 return618 }619 bstmt := &ast.BlockStatement{Base: &ast.Base{Offset: blcOffs, File: p.tfile}}620 if p.parseBlock(true, bstmt) {621 p.block.Append(&ast.ForStatement{622 Base: &ast.Base{Offset: offs, File: p.tfile},623 Label: label,624 I: i,625 Value: value,626 Oprnd: oprd,627 Range: lrange,628 Insts: bstmt,629 })630 }631}632func (p *parser) parseIf(inForLoop bool, elstmt *ast.ElseStatement) {633 offs := p.cOffs634 // switch p.nTok {635 // case token.ON:636 // p.next()637 // condOffs := p.cOffs638 // switch p.next(); p.cTok {639 // case token.LINUX, token.MACOS, token.WINDOWS:640 // cond = &ast.OSysCheck{Base: &ast.Base{Offset: condOffs, File: p.tfile}, OS: p.cTok}641 // p.next()642 // default:643 // p.errorHandler(p.curPos(), "expect operating system keyword got %s (%s)", p.cTok, p.cLit)644 // return645 // }646 // case token.HASH, token.AT, token.FD:647 // p.next()648 // op := p.cTok649 // base := &ast.Base{Offset: p.cOffs, File: p.tfile}650 // var x ast.Node651 // if p.next(); p.cTok == token.IDENT || (p.cTok == token.STRING && op == token.FD) {652 // soffs, tok, lit := p.cOffs, p.cTok, p.cLit653 // if p.next(); p.cTok != token.EXISTS {654 // p.errorHandler(p.curPos(), "expect %s but got %s", token.EXISTS, p.cTok)655 // return656 // }657 // if tok == token.STRING {658 // x = &ast.BasicLit{Base: base, Lit: lit, Kind: tok, Mark: p.s.src[soffs-1]}659 // } else {660 // x = &ast.Ident{Base: base, Name: lit}661 // }662 // } else {663 // p.errorHandler(p.curPos(), "expect identifier or file expression but got %s", p.cTok)664 // return665 // }666 // cond = &ast.Exists{Base: base, Op: op, X: x}667 // p.next()668 // default:669 cond := p.parseBinaryExpr(false, token.LowestPrec+1)670 blcOffs := p.cOffs671 if 
p.expect(token.LBRACE) == -1 {672 return673 }674 bstmt := &ast.BlockStatement{Base: &ast.Base{Offset: blcOffs, File: p.tfile}}675 if p.parseBlock(inForLoop, bstmt) {676 ifstmt := &ast.IfStatement{677 Base: &ast.Base{Offset: offs, File: p.tfile},678 Cond: cond,679 Insts: bstmt,680 }681 if elstmt == nil {682 p.block.Append(ifstmt)683 } else {684 elstmt.IfStmt = ifstmt685 }686 // parse else statement if there any687 if p.cTok != token.ELSE {688 return689 }690 elOffs := p.cOffs691 p.next()692 elstmt := &ast.ElseStatement{693 Base: &ast.Base{Offset: elOffs, File: p.tfile},694 }695 if p.cTok == token.IF {696 p.parseIf(inForLoop, elstmt)697 } else {698 blcOffs = p.cOffs699 if p.expect(token.LBRACE) == -1 {700 return701 }702 elstmt.Insts = &ast.BlockStatement{Base: &ast.Base{Offset: blcOffs, File: p.tfile}}703 if !p.parseBlock(inForLoop, elstmt.Insts) {704 return705 }706 }707 ifstmt.Else = elstmt708 }709}710func (p *parser) parseBlock(inForLoop bool, block *ast.BlockStatement) bool {711 prevBlock := p.block712 p.block = block713 defer func() { p.block = prevBlock }()714 for p.cTok != token.RBRACE && p.cTok != token.EOF {715 switch p.cTok {716 case token.IDENT:717 if p.nTok == token.INC || p.nTok == token.DEC {718 offs, lit := p.cOffs, p.cLit719 p.next()720 p.block.Append(&ast.ExprWrapperStatement{721 X: &ast.IncDec{722 Op: p.cTok,723 X: &ast.Ident{Base: &ast.Base{Offset: offs, File: p.tfile}, Name: lit},724 },725 })726 p.next()727 p.expect(token.LF)728 } else {729 if p.nTok == token.LBRACK {730 if x := p.parseIndexExpression(); x != nil {731 p.parseAssignStatement(x)732 }733 } else {734 settable := &ast.Ident{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Name: p.cLit}735 p.parseAssignStatement(settable)736 }737 }738 case token.AT, token.HASH:739 // parse invocation command call740 p.parseCallReference(false, nil)741 case token.FOR:742 p.parseForLoop(inForLoop)743 case token.IF:744 p.parseIf(inForLoop, nil)745 case token.EXIT:746 // parse exit747 offs := p.cOffs748 if 
code := p.parseBinaryExpr(false, token.LowestPrec+1); code != nil {749 p.block.Append(&ast.ExprWrapperStatement{750 X: &ast.Exit{Base: &ast.Base{Offset: offs, File: p.tfile}, ExitCode: code},751 })752 }753 p.expect(token.LF)754 case token.RETURN:755 // parse return756 p.block.Append(&ast.ReturnStatement{757 Base: &ast.Base{Offset: p.cOffs, File: p.tfile},758 X: p.parseBinaryExpr(false, token.LowestPrec+1),759 })760 case token.BREAK, token.CONTINUE:761 offs, label, op := p.cOffs, "", p.cTok762 p.next()763 if p.cTok == token.COLON {764 p.next()765 label = p.cLit766 if p.expect(token.IDENT) == -1 {767 return false768 }769 }770 p.block.Append(&ast.BreakContinueStatement{771 Base: &ast.Base{Offset: offs, File: p.tfile},772 Op: op,773 Label: label,774 })775 if p.cTok == token.LF {776 // skip optional line feed777 p.next()778 }779 case token.COMMENT:780 p.next()781 // eat comment for now782 // TODO: add comment to token file783 }784 }785 endBlock := p.cTok == token.RBRACE786 p.next()787 if p.cTok == token.LF {788 p.next()789 }790 return endBlock791}792func (p *parser) parseIndexExpression() ast.SettableNode {793 offs, lit := p.cOffs, p.cLit794 if p.next(); p.cTok != token.LBRACK {795 p.errorHandler(p.curPos(), "expect [ but got %s", p.cTok)796 return nil797 }798 index := p.parseBinaryExpr(false, token.LowestPrec+1)799 if p.cTok != token.RBRACK {800 return nil801 }802 base := &ast.Base{Offset: offs, File: p.tfile}803 return &ast.Index{804 Base: base,805 Index: index,806 X: &ast.Ident{Base: base, Name: lit},807 }808}809func (p *parser) parseInterval() *ast.Interval {810 offs := p.cOffs811 var aic, bic bool812 switch p.cTok {813 case token.LBRACE, token.LPAREN, token.RBRACK:814 p.next()815 aic = false816 case token.LBRACK:817 p.next()818 fallthrough819 default:820 aic = true821 }822 a, _ := p.parseOperand()823 if p.expect(token.RANGE) == -1 {824 return nil825 }826 b, _ := p.parseOperand()827 switch p.cTok {828 case token.RBRACE, token.RPAREN, token.LBRACK:829 p.next()830 bic 
= false831 case token.RBRACK:832 p.next()833 fallthrough834 default:835 bic = true836 }837 return &ast.Interval{Base: &ast.Base{Offset: offs, File: p.tfile}, A: a, AInclude: aic, B: b, BInclude: bic}838}839func (p *parser) parseTernaryExpr(x ast.Node) ast.Node {840 offs := p.cOffs841 tx := p.parseBinaryExpr(false, token.LowestPrec+1)842 if p.cTok == token.COLON {843 return &ast.Conditional{844 Base: &ast.Base{Offset: offs, File: p.tfile},845 Cond: x,846 True: tx,847 False: p.parseBinaryExpr(false, token.LowestPrec+1),848 }849 } else {850 p.errorHandler(p.curPos(), "expect ':' but got %s", p.cTok)851 }852 return nil853}854func (p *parser) parseFallbackExpr(x ast.Node) ast.Node {855 offs := p.cOffs856 return &ast.Fallback{857 Base: &ast.Base{Offset: offs, File: p.tfile},858 Primary: x,859 Default: p.parseBinaryExpr(false, token.LowestPrec+1),860 }861}862func (p *parser) parseIsExpr(x ast.Node) ast.Node {863 offs := p.cOffs864 var types []token.Token865 p.next()866 for {867 switch {868 case token.TINTEGER <= p.cTok && p.cTok <= token.TMAP:869 types = append(types, p.cTok)870 case p.cTok == token.OR:871 // do nothing872 default:873 return &ast.IsType{874 Base: &ast.Base{Offset: offs, File: p.tfile},875 X: x,876 Types: types,877 }878 }879 p.next()880 }881}882func (p *parser) parseTypeCaseExpr() ast.Node {883 offs := p.cOffs884 to := p.cTok885 p.next()886 if p.cTok == token.LPAREN {887 x := p.parseBinaryExpr(false, token.LowestPrec+1)888 if p.expect(token.RPAREN) != -1 {889 return &ast.TypeCast{890 Base: &ast.Base{Offset: offs, File: p.tfile},891 To: to,892 X: x,893 }894 }895 } else {896 p.errorHandler(p.curPos(), "expected (")897 }898 return nil899}900func (p *parser) parseTransformation() ast.Node {901 ioffs, ilit := p.cOffs, p.cLit902 p.next()903 ftOffs := p.cOffs904 if p.expect(token.LPAREN) == -1 {905 return nil906 }907 if fn := p.parseDeclareFunction(true); fn != nil {908 return &ast.Transformation{909 Base: &ast.Base{Offset: ftOffs, File: p.tfile},910 Ident: 
&ast.Ident{Base: &ast.Base{Offset: ioffs, File: p.tfile}, Name: ilit},911 Fn: fn,912 }913 }914 return nil915}916func (p *parser) parseCallReference(assign bool, prev *ast.Call) ast.Node {917 callOffs := p.cOffs918 kind := p.cTok919 p.next()920 name := p.cLit921 if p.expect(token.IDENT) == -1 {922 return nil923 }924 var args []ast.Node925 var redirect *ast.RedirectTo926 for p.cTok != token.LF && p.cTok != token.EOF {927 switch p.cTok {928 case token.WRITE_TO, token.APPEND_TO:929 if redirect != nil {930 p.errorHandler(p.curPos(), "multiple write (>) or append (>>) to")931 return nil932 }933 redirect = &ast.RedirectTo{934 Base: &ast.Base{Offset: p.cOffs, File: p.tfile},935 Append: p.cTok == token.APPEND_TO,936 }937 p.next()938 case token.READ_FROM:939 if redirect != nil {940 p.errorHandler(p.curPos(), "read from syntax is not allow after write or append to file")941 return nil942 }943 offs := p.cOffs944 p.next()945 x, _ := p.parseOperand()946 args = append(args, &ast.ReadFrom{947 Base: &ast.Base{Offset: offs, File: p.tfile},948 File: x,949 })950 case token.PIPE:951 goto end952 default:953 x, _ := p.parseOperand()954 if redirect != nil {955 redirect.Files = append(redirect.Files, x)956 } else {957 args = append(args, x)958 }959 }960 }961end:962 if tok := p.cTok; p.cTok == token.LF || p.cTok == token.PIPE {963 var node ast.Node964 node = &ast.Call{965 Base: &ast.Base{Offset: callOffs, File: p.tfile},966 Kind: kind,967 Name: name,968 Args: args,969 }970 p.next()971 if tok == token.PIPE {972 nextNode := p.parseCallReference(true, node.(*ast.Call))973 node = &ast.Pipe{974 X: node.(*ast.Call),975 Y: nextNode,976 }977 } else if redirect != nil {978 redirect.Caller = node979 node = redirect980 }981 if assign {982 return node983 } else {984 p.block.Append(&ast.ExprWrapperStatement{X: node})985 return nil986 }987 }988 return nil989}990func (p *parser) parseDeclareFunction(literal bool) *ast.Function {991 var name string992 if !literal {993 name = p.cLit994 if 
p.expect(token.IDENT) == -1 || p.expect(token.LPAREN) == -1 {995 return nil996 }997 }998 if args := p.parseDeclareArgument(); args == nil {999 return nil1000 } else if p.expect(token.RPAREN) != -1 {1001 blcOff := p.cOffs1002 switch p.cTok {1003 case token.LAMBDA:1004 if x := p.parseBinaryExpr(false, token.LowestPrec+1); x != nil {1005 return &ast.Function{1006 Lambda: token.LAMBDA,1007 Args: args,1008 X: x,1009 }1010 }1011 case token.LBRACE:1012 p.next()1013 block := &ast.BlockStatement{Base: &ast.Base{Offset: blcOff, File: p.tfile}}1014 if p.parseBlock(false, block) {1015 return &ast.Function{1016 Name: name,1017 Args: args,1018 Insts: block,1019 }1020 }1021 default:1022 p.errorHandler(p.curPos(), "unexpected token %s", p.cTok)1023 }1024 }1025 return nil1026}1027func (p *parser) parseDeclareArgument() []*ast.Ident {1028 var args []*ast.Ident1029 for p.cTok != token.RPAREN {1030 if p.cTok == token.IDENT {1031 args = append(args, &ast.Ident{Base: &ast.Base{Offset: p.cOffs, File: p.tfile}, Name: p.cLit})1032 if p.next(); p.cTok == token.COMMA {1033 p.next()1034 }1035 } else {1036 p.errorHandler(p.curPos(), "expect identifier but got %s", p.cTok)1037 return nil1038 }1039 }1040 return args1041}1042func parseArrayFile(n ast.Node, tok token.Token) (isGlob bool, x []ast.Node) {1043 if tok == token.STRING {1044 bl := n.(*ast.BasicLit)1045 if mes, err := filepath.Glob(bl.Lit); err != nil || len(mes) == 0 {1046 return false, nil1047 } else {1048 for _, sf := range mes {1049 x = append(x, &ast.BasicLit{Lit: sf, Kind: token.STRING, Mark: bl.Mark})1050 }1051 }1052 return true, x1053 } else {1054 return false, nil1055 }1056}...

Full Screen

Full Screen

compiler_test.go

Source:compiler_test.go Github

copy

Full Screen

...109 target := targets.List["test"][arch]110 t.Run(arch, func(t *testing.T) {111 t.Parallel()112 em := ast.NewErrorMatcher(t, filepath.Join("testdata", "errors.txt"))113 desc := ast.Parse(em.Data, "errors.txt", em.ErrorHandler)114 if desc == nil {115 em.DumpErrors(t)116 t.Fatalf("parsing failed")117 }118 ExtractConsts(desc, target, em.ErrorHandler)119 em.Check(t)120 })121 }122}123func TestErrors2(t *testing.T) {124 t.Parallel()125 consts := map[string]uint64{126 "SYS_foo": 1,127 "C0": 0,128 "C1": 1,129 "C2": 2,130 }131 for _, arch := range []string{"32_shmem", "64"} {132 target := targets.List["test"][arch]133 t.Run(arch, func(t *testing.T) {134 t.Parallel()135 em := ast.NewErrorMatcher(t, filepath.Join("testdata", "errors2.txt"))136 desc := ast.Parse(em.Data, "errors2.txt", em.ErrorHandler)137 if desc == nil {138 em.DumpErrors(t)139 t.Fatalf("parsing failed")140 }141 info := ExtractConsts(desc, target, em.ErrorHandler)142 if info == nil {143 em.DumpErrors(t)144 t.Fatalf("const extraction failed")145 }146 Compile(desc, consts, target, em.ErrorHandler)147 em.Check(t)148 })149 }150}151func TestWarnings(t *testing.T) {152 t.Parallel()153 consts := map[string]uint64{154 "SYS_foo": 1,155 }156 for _, arch := range []string{"32_shmem", "64"} {157 target := targets.List["test"][arch]158 t.Run(arch, func(t *testing.T) {159 t.Parallel()160 em := ast.NewErrorMatcher(t, filepath.Join("testdata", "warnings.txt"))161 desc := ast.Parse(em.Data, "warnings.txt", em.ErrorHandler)162 if desc == nil {163 em.DumpErrors(t)164 t.Fatalf("parsing failed")165 }166 info := ExtractConsts(desc, target, em.ErrorHandler)167 if info == nil {168 em.DumpErrors(t)169 t.Fatalf("const extraction failed")170 }171 p := Compile(desc, consts, target, em.ErrorHandler)172 if p == nil {173 em.DumpErrors(t)174 t.Fatalf("compilation failed")175 }176 em.Check(t)177 })178 }179}180func TestFuzz(t *testing.T) {181 t.Parallel()182 for _, data := range []string{183 "d~^gB̉`i\u007f?\xb0.",184 "da[",185 
"define\x98define(define\x98define\x98define\x98define\x98define)define\tdefin",186 "resource g[g]",187 `t[188l t189]`,190 `t()D[0]191type D[e]l`,192 "E",193 "#",194 `195type p b[L]196type b[L] {197 e b[L[L]]198}`,199 } {200 Fuzz([]byte(data)[:len(data):len(data)])201 }202}203func TestAlign(t *testing.T) {204 t.Parallel()205 const input = `206foo$0(a ptr[in, s0])207s0 {208 f0 int8209 f1 int16210}211foo$1(a ptr[in, s1])212s1 {213 f0 ptr[in, s2, opt]214}215s2 {216 f1 s1217 f2 array[s1, 2]218 f3 array[array[s1, 2], 2]219}220 `221 desc := ast.Parse([]byte(input), "input", nil)222 if desc == nil {223 t.Fatal("failed to parse")224 }225 p := Compile(desc, map[string]uint64{"SYS_foo": 1}, targets.List["test"]["64"], nil)226 if p == nil {227 t.Fatal("failed to compile")228 }229 got := p.StructDescs[0].Desc230 t.Logf("got: %#v", got)231}232func TestCollectUnusedError(t *testing.T) {233 t.Parallel()234 const input = `235 s0 {236 f0 fidl_string237 }238 `239 nopErrorHandler := func(pos ast.Pos, msg string) {}240 desc := ast.Parse([]byte(input), "input", nopErrorHandler)241 if desc == nil {242 t.Fatal("failed to parse")243 }244 _, err := CollectUnused(desc, targets.List["test"]["64"], nopErrorHandler)245 if err == nil {246 t.Fatal("CollectUnused should have failed but didn't")247 }248}249func TestCollectUnused(t *testing.T) {250 t.Parallel()251 inputs := []struct {252 text string253 names []string254 }{255 {256 text: `257 s0 {258 f0 string...

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 f, err := parser.ParseFile(fset, "2.go", nil, parser.ParseComments)4 if err != nil {5 fmt.Println(err)6 }

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 f, err := parser.ParseFile(fset, "sample.go", nil, parser.AllErrors)4 if err != nil {5 fmt.Println("Error in parsing file")6 }7 ast.Inspect(f, func(n ast.Node) bool {8 switch x := n.(type) {9 fmt.Println("Bad expression at position", fset.Position(x.Pos()))10 fmt.Println("Bad statement at position", fset.Position(x.Pos()))11 fmt.Println("Bad declaration at position", fset.Position(x.Pos()))12 }13 })14}

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 f, err := parser.ParseFile(fset, "test.go", nil, parser.AllErrors)4 if err != nil {5 fmt.Println(err)6 }7 ast.Inspect(f, func(n ast.Node) bool {8 if n == nil {9 }10 switch x := n.(type) {11 if x.Body == nil {12 fmt.Println(x.Name.Name, "has no body")13 }14 }15 })16}17Go | ast.Inspect() Method18Go | ast.Walk() Method

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 f, err := parser.ParseFile(fset, "1.go", nil, parser.ParseComments)4 if err != nil {5 fmt.Println(err)6 }7 ast.Print(fset, f)8 ast.Error = func(err error) {9 fmt.Println(err)10 }11}

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 fset := token.NewFileSet()4 f, err := parser.ParseFile(fset, "test.go", nil, parser.ParseComments)5 if err != nil {6 fmt.Println(err)7 }8 ast.Inspect(f, func(n ast.Node) bool {9 switch x := n.(type) {10 fmt.Println(x.Name)11 }12 })13}14}15The ast.Walk() function differs from ast.Inspect() in that it accepts an ast.Visitor rather than a callback function. Walk traverses the tree in depth-first order, calling the visitor's Visit method for each node, and it stops descending into a subtree when Visit returns nil. ast.Inspect is a convenience wrapper around Walk: its first parameter is the AST root to traverse and its second is a function called for each node in the tree; that function returns a boolean, and returning false skips traversal of the current node's children.

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 import "fmt"4 func main() {5 fmt.Println("Hello, playground")6 }`7 fset := token.NewFileSet()8 file, err := parser.ParseFile(fset, "", src, 0)9 if err != nil {10 log.Fatal(err)11 }12 ast.Inspect(file, func(n ast.Node) bool {

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 fset := token.NewFileSet()4 file, err := parser.ParseFile(fset, "1.go", nil, 0)5 if err != nil {6 fmt.Println(err)7 }8 errh := ast.ErrorHandler(func(pos token.Position, msg string) {9 fmt.Println(msg)10 })11 file, err = parser.ParseFile(fset, "1.go", nil, parser.ParseComments, errh)12 if err != nil {13 fmt.Println(err)14 }15 fmt.Println(file)16}

Full Screen

Full Screen

ErrorHandler

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 node := ast.Ident{4 }5 node.ErrorHandler = func(err error) {6 fmt.Println(err)7 }8 node.Error("error")9}

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. It covers everything from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. LambdaTest Learning Hubs compile a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful