Run Ginkgo automation tests on LambdaTest cloud grid
Perform automation testing on 3000+ real desktop and mobile devices online.
package lexer
import (
"fmt"
"github.com/omnilium/mimic/internal/types"
"github.com/omnilium/mimic/internal/utility"
"regexp"
"strings"
)
// Template syntax markers recognized by the lexer.
const (
	FILTER_SEPARATOR             = "|"
	FILTER_ARGUMENT_SEPARATOR    = ":"
	VARIABLE_ATTRIBUTE_SEPARATOR = "."
	BLOCK_TAG_START              = "{%"
	BLOCK_TAG_END                = "%}"
	VARIABLE_TAG_START           = "{{"
	VARIABLE_TAG_END             = "}}"
	COMMENT_TAG_START            = "{#"
	COMMENT_TAG_END              = "#}"
	SINGLE_BRACE_START           = "{"
	SINGLE_BRACE_END             = "}"
)
// Lexer splits a template string into block/variable/comment/text tokens.
type Lexer struct {
	template string // raw template source to tokenize
	verbatim bool   // true while inside a {% verbatim %} region; tags are then emitted as text
	r *regexp.Regexp // matches any {% ... %}, {{ ... }} or {# ... #} tag
	verbatimBlock string // block-tag content that ends the current verbatim region (e.g. "endverbatim")
}
// NewLexer compiles the tag-matching pattern and returns a Lexer for the
// given template. The lexer starts outside of any verbatim region.
func NewLexer(template string) (*Lexer, error) {
	// Non-greedy alternation over the three tag forms: {% %}, {{ }}, {# #}.
	r, err := regexp.Compile(`({%.*?%}|{{.*?}}|{#.*?#})`)
	if err != nil {
		return nil, err
	}
	return &Lexer{
		template: template,
		verbatim: false,
		r: r,
		verbatimBlock: "",
	}, nil
}
// Tokenize splits the template into a sequence of tokens.
//
// The inTag flag is flipped after every segment, so SmartSplitWithRegex is
// assumed to return segments that strictly alternate between literal text and
// tag matches (including empty segments for adjacent tags) — TODO confirm
// against the utility package. Empty segments produce no token but still
// toggle the flag. Line numbers are advanced by counting newlines in each
// emitted segment.
func (l *Lexer) Tokenize() []types.Token {
	inTag := false
	lineNo := 1
	var result []types.Token
	for _, tokenString := range utility.SmartSplitWithRegex(l.r, l.template, -1) {
		if tokenString != "" {
			result = append(result, l.createToken(tokenString, lineNo, inTag))
			lineNo += strings.Count(tokenString, "\n")
		}
		inTag = !inTag
	}
	return result
}
// createToken converts one split segment into a Token.
//
// Segments arrive alternating between literal text (inTag == false) and tag
// matches (inTag == true). Inside a verbatim region every tag other than the
// matching end tag is demoted back to plain text.
func (l *Lexer) createToken(tokenString string, lineNo int, inTag bool) types.Token {
	if inTag {
		tokenStart := tokenString[0:2]
		if tokenStart == BLOCK_TAG_START {
			content := strings.TrimSpace(tokenString[2 : len(tokenString)-2])
			if l.verbatim {
				// Only the matching end tag terminates the verbatim region;
				// any other block tag is emitted verbatim as text.
				if content != l.verbatimBlock {
					return types.Token{
						TokenType: types.TOKEN_TEXT,
						Content: tokenString,
						LineNo: lineNo,
					}
				}
				l.verbatim = false
			} else if strings.HasPrefix(content, "verbatim") {
				// {% verbatim %} (or a named variant) opens a region closed by
				// the "end"-prefixed counterpart. HasPrefix avoids the panic
				// the previous content[:8] slice caused on block tags whose
				// trimmed content is shorter than eight characters.
				l.verbatim = true
				l.verbatimBlock = fmt.Sprintf("end%s", content)
			}
			return types.Token{
				TokenType: types.TOKEN_BLOCK,
				Content: content,
				LineNo: lineNo,
			}
		}
		// Variable and comment tags are only meaningful outside verbatim.
		if !l.verbatim {
			content := strings.TrimSpace(tokenString[2 : len(tokenString)-2])
			if tokenStart == VARIABLE_TAG_START {
				return types.Token{
					TokenType: types.TOKEN_VAR,
					Content: content,
					LineNo: lineNo,
				}
			}
			if tokenStart == COMMENT_TAG_START {
				return types.Token{
					TokenType: types.TOKEN_COMMENT,
					Content: content,
					LineNo: lineNo,
				}
			}
		}
	}
	// Literal text, or a tag suppressed by an active verbatim region.
	return types.Token{
		TokenType: types.TOKEN_TEXT,
		Content: tokenString,
		LineNo: lineNo,
	}
}
package main
import (
	"errors"
	"go/ast"
	"go/parser"
	"go/token"
	"io/ioutil"
	"log"
	"os"
	"path"
	"sort"
	"strings"

	"golang.org/x/tools/imports"
)
// Func captures the rendered pieces of a method signature used by the
// code generator.
type Func struct {
	Name   string // method name
	Args   string // parenthesized parameter list, e.g. "(context.Context)"
	Return string // return clause; empty or parenthesized
}

// Interface renders the function as an interface-style method declaration.
func (f Func) Interface() string {
	var b strings.Builder
	b.WriteString(f.Name)
	b.WriteString(f.Args)
	b.WriteString(" ")
	b.WriteString(f.Return)
	return b.String()
}

// Method renders the function as a struct field of function type.
func (f Func) Method() string {
	var b strings.Builder
	b.WriteString(f.Name)
	b.WriteString(" func")
	b.WriteString(f.Args)
	b.WriteString(" ")
	b.WriteString(f.Return)
	return b.String()
}
// main scans the API submodules under app/submodule for exported methods and
// generates a typed client (a FullNode struct plus one struct per receiver)
// into app/client/client.go.
func main() {
	pkgDir := "app/submodule"
	// Local type names that must be re-qualified with their API package alias
	// in the generated client code.
	typeMap := map[string]string{
		"Partition": "chainApiTypes.Partition",
		"Deadline": "chainApiTypes.Deadline",
		"MarketDeal": "chainApiTypes.MarketDeal",
		"BlockTemplate": "mineApiTypes.BlockTemplate",
		"InvocResult": "syncApiTypes.InvocResult",
		"ProtocolParams": "chainApiTypes.ProtocolParams",
		"BlockMessage": "chainApiTypes.BlockMessage",
		"MsgLookup": "messageApiTypes.MsgLookup",
		"BlockMessages": "chainApiTypes.BlockMessages",
	}
	_, pkgs, err := collectAPIFile(pkgDir)
	if err != nil {
		log.Fatal(err) // exits; the old `return` after it was unreachable
	}
	codes := make(map[string][]Func)
	for _, pkg := range pkgs {
		for _, file := range pkg.Files {
			for _, decl := range file.Decls {
				funcDecl, ok := decl.(*ast.FuncDecl)
				if !ok {
					continue // only function declarations matter
				}
				receiveName, err := joinFieldList(funcDecl.Recv, typeMap)
				if err != nil {
					log.Fatal(err)
				}
				receiveName = strings.Trim(receiveName, "*")
				if receiveName == "" {
					continue // plain function, not a method
				}
				if !funcDecl.Name.IsExported() {
					continue // unexported methods are not part of the API
				}
				function := Func{Name: funcDecl.Name.Name}
				// Render the parameter list.
				args, err := joinFieldList(funcDecl.Type.Params, typeMap)
				if err != nil {
					log.Fatal(err)
				}
				function.Args = "(" + args + ")"
				// Render the result list; parenthesize only when non-empty.
				results, err := joinFieldList(funcDecl.Type.Results, typeMap)
				if err != nil {
					log.Fatal(err)
				}
				if funcDecl.Type.Results != nil && len(funcDecl.Type.Results.List) > 0 {
					function.Return = "(" + results + ")"
				} else {
					function.Return = results
				}
				// append handles the missing-key case; the old has-check was
				// redundant.
				codes[receiveName] = append(codes[receiveName], function)
			}
		}
	}
	if err := generateCode(codes, "./app/client/client.go"); err != nil {
		log.Fatal(err) // previously this error was silently discarded
	}
}
// generateCode renders the collected methods into fname as Go source: a
// FullNode struct holding every method as a func-typed field, followed by one
// struct per receiver name. The file is then rewritten through goimports
// (imports.Process) to strip unused imports and format it.
func generateCode(codelines map[string][]Func, fname string) error {
	fs, err := os.Create(fname)
	if err != nil {
		return err
	}
	// Superset of imports the generated code may need; imports.Process prunes
	// the unused ones afterwards. This literal is emitted verbatim.
	packages := `package client
import (
"context"
"io"
"time"
"github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-bitfield"
"github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/go-state-types/big"
acrypto "github.com/filecoin-project/go-state-types/crypto"
"github.com/filecoin-project/go-state-types/dline"
"github.com/filecoin-project/go-state-types/network"
"github.com/ipfs/go-cid"
ipld "github.com/ipfs/go-ipld-format"
"github.com/libp2p/go-libp2p-core/metrics"
"github.com/libp2p/go-libp2p-core/peer"
ma "github.com/multiformats/go-multiaddr"
chainApiTypes "github.com/filecoin-project/venus/app/submodule/chain"
mineApiTypes "github.com/filecoin-project/venus/app/submodule/mining"
"github.com/filecoin-project/venus/app/submodule/mpool"
syncApiTypes "github.com/filecoin-project/venus/app/submodule/syncer"
"github.com/filecoin-project/venus/pkg/beacon"
"github.com/filecoin-project/venus/pkg/chain"
"github.com/filecoin-project/venus/pkg/chainsync/status"
"github.com/filecoin-project/venus/pkg/crypto"
"github.com/filecoin-project/venus/pkg/net"
"github.com/filecoin-project/venus/pkg/specactors/builtin/miner"
"github.com/filecoin-project/venus/pkg/types"
"github.com/filecoin-project/venus/pkg/vm"
"github.com/filecoin-project/venus/pkg/wallet"
)
`
	// Iterate receivers in sorted order so the generated file is
	// deterministic (map iteration order is random in Go).
	names := make([]string, 0, len(codelines))
	for name := range codelines {
		names = append(names, name)
	}
	sort.Strings(names)
	builder := strings.Builder{}
	builder.WriteString(packages)
	builder.WriteString("type FullNode struct {\n")
	for _, name := range names {
		for _, function := range codelines[name] {
			builder.WriteString("\t" + function.Method() + "\n")
		}
		builder.WriteString("\n")
	}
	builder.WriteString("}\n\n")
	for _, name := range names {
		builder.WriteString("type " + name + " struct {\n")
		for _, function := range codelines[name] {
			builder.WriteString("\t" + function.Method() + "\n")
		}
		builder.WriteString("}\n\n")
	}
	// The old code ignored both the write and close errors; a short write
	// would have produced a silently truncated client.go.
	if _, err := fs.WriteString(builder.String()); err != nil {
		fs.Close()
		return err
	}
	// Close before imports.Process re-reads the file from disk.
	if err := fs.Close(); err != nil {
		return err
	}
	options := &imports.Options{
		TabWidth: 8,
		TabIndent: true,
		Comments: true,
		Fragment: true,
	}
	res, err := imports.Process(fname, nil, options)
	if err != nil {
		return err
	}
	return ioutil.WriteFile(fname, res, 0777)
}
func joinFieldList(fields *ast.FieldList, typeMap map[string]string) (string, error) {
if fields == nil || len(fields.List) == 0 {
return "", nil
}
returnString := ""
returns := fields.List
for _, r := range returns {
tokeString, err := typeString(r.Type, typeMap)
if err != nil {
log.Fatal(err)
return "", err
}
returnString += tokeString + ","
}
returnString = strings.Trim(returnString, ",")
return returnString, nil
}
func typeString(token ast.Expr, typeMap map[string]string) (string, error) {
tokenString := ""
switch t := token.(type) {
case *ast.SelectorExpr:
name, err := typeString(t.X, typeMap)
if err != nil {
return tokenString, err
}
tokenString += name + "." + t.Sel.String()
case *ast.Ident:
if token, has := typeMap[t.String()]; has {
tokenString += token
} else {
tokenString += t.String()
}
case *ast.StarExpr:
name, err := typeString(t.X, typeMap)
if err != nil {
return tokenString, err
}
tokenString += "*" + name
case *ast.ArrayType:
name, err := typeString(t.Elt, typeMap)
if err != nil {
return tokenString, err
}
tokenString += "[]" + name
case *ast.InterfaceType:
tokenString += "interface{}"
case *ast.ChanType:
name, err := typeString(t.Value, typeMap)
if err != nil {
return tokenString, err
}
tokenString += "chan " + name
case *ast.MapType:
keyString, err := typeString(t.Key, typeMap)
if err != nil {
return tokenString, err
}
valueString, err := typeString(t.Value, typeMap)
if err != nil {
return tokenString, err
}
tokenString += "map[" + keyString + "]" + valueString
default:
return "", errors.New("unexpect types")
}
return tokenString, nil
}
// collectAPIFile walks the immediate subdirectories of dir and parses every
// Go file whose name ends in "_api.go", grouping the parsed files by their
// declared package name.
func collectAPIFile(dir string) (*token.FileSet, map[string]*ast.Package, error) {
	entries, err := ioutil.ReadDir(dir)
	if err != nil {
		return nil, nil, err
	}
	fset := token.NewFileSet()
	pkgs := make(map[string]*ast.Package)
	for _, entry := range entries {
		subDir := path.Join(dir, entry.Name())
		moduleFiles, err := ioutil.ReadDir(subDir)
		if err != nil {
			return nil, nil, err
		}
		for _, mf := range moduleFiles {
			// Skip directories and anything that is not an *_api.go file.
			if mf.IsDir() || !strings.HasSuffix(mf.Name(), "_api.go") {
				continue
			}
			fileName := path.Join(subDir, mf.Name())
			src, err := parser.ParseFile(fset, fileName, nil, 0)
			if err != nil {
				return nil, nil, err
			}
			pkgName := src.Name.Name
			pkg, found := pkgs[pkgName]
			if !found {
				pkg = &ast.Package{
					Name: pkgName,
					Files: make(map[string]*ast.File),
				}
				pkgs[pkgName] = pkg
			}
			pkg.Files[fileName] = src
		}
	}
	return fset, pkgs, nil
}
package ethapi
import (
"fmt"
"github.com/dgrijalva/jwt-go"
"github.com/golang/protobuf/proto"
"math/big"
"pdx-chain/common"
"pdx-chain/core/publicBC"
"pdx-chain/core/types"
"pdx-chain/core/vm"
"pdx-chain/crypto"
"pdx-chain/ethdb"
"pdx-chain/log"
"pdx-chain/pdxcc"
pb "pdx-chain/pdxcc/protos"
"pdx-chain/pdxcc/util"
"pdx-chain/utopia"
"pdx-chain/utopia/utils"
)
// ParseToken extracts the JWT string attached to a transaction. Chaincode
// transactions — recognized either by pdxcc.CanExec on the target address or
// by the target's code unmarshalling as a pb.Deployment — carry the token
// inside their protobuf payload; all other transactions carry it under the
// "jwt" key of the meta map parsed from tx.Data().
func ParseToken(tx *types.Transaction, stateDB vm.StateDB) (token string, err error) {
	//if cc tx
	var ok bool
	var jwtBuf []byte
	to := tx.To()
	if to != nil {
		if pdxcc.CanExec(*to) {
			//is cc
			// NOTE: token/err here shadow the named returns; harmless because
			// both paths return immediately.
			token, err := parseCCToken(tx.Data())
			if err != nil {
				return "", fmt.Errorf("parse cc token:%v", err)
			}
			return token, nil
		}
		//keyHash := utils.CCKeyHash
		//ccBuf := stateDB.GetPDXState(*to, keyHash)
		ccBuf := stateDB.GetCode(*to)
		if len(ccBuf) != 0 {
			var deploy pb.Deployment
			// If the code blob decodes as a Deployment, treat the target as a
			// chaincode contract; on unmarshal failure we deliberately fall
			// through to the regular-transaction path below.
			err = proto.Unmarshal(ccBuf, &deploy)
			if err == nil {
				//is cc
				token, err := parseCCToken(tx.Data())
				if err != nil {
					return "", fmt.Errorf("parse cc token:%v", err)
				}
				return token, nil
			}
		}
	}
	// Regular transaction: the token lives in the meta section of the data.
	_, meta, err := utopia.ParseData(tx.Data())
	if err != nil {
		return "", fmt.Errorf("parse data:%v", err)
	}
	if meta == nil {
		return "", fmt.Errorf("meta == nil")
	}
	jwtBuf, ok = meta["jwt"]
	if !ok {
		return "", fmt.Errorf("jwt not in meta, meta:%v", meta)
	}
	return string(jwtBuf), nil
}
// parseCCToken extracts the "jwt" meta entry from a chaincode transaction
// payload: a protobuf pb.Transaction wrapping either a Deployment or an
// Invocation, depending on the transaction type.
func parseCCToken(payload []byte) (token string, err error) {
	//payload, _, err := utopia.ParseData(tx.Data()) //maybe parse
	txPb := &pb.Transaction{}
	err = proto.Unmarshal(payload, txPb)
	if err != nil {
		return "", fmt.Errorf("proto unmarshal tx:%v", err)
	}
	var jwtBuf []byte
	var ok bool
	switch txPb.Type {
	case types.Transaction_deploy:
		deploy := pb.Deployment{}
		err = proto.Unmarshal(txPb.Payload, &deploy)
		if err != nil {
			return "", fmt.Errorf("proto unmarshal deploy:%v", err)
		}
		jwtBuf, ok = deploy.Payload.Meta["jwt"]
		if !ok {
			return "", fmt.Errorf("jwt not in deploy meta")
		}
	case types.Transaction_invoke: //start stop withdraw
		invocation := pb.Invocation{}
		err = proto.Unmarshal(txPb.Payload, &invocation)
		if err != nil {
			return "", fmt.Errorf("proto unmarshal invocation:%v", err)
		}
		jwtBuf, ok = invocation.Meta["jwt"]
		if !ok {
			return "", fmt.Errorf("jwt not in invocation meta")
		}
	default:
		// Previously an unknown transaction type fell through and returned an
		// empty token with a nil error; make that failure explicit so callers
		// cannot mistake it for a valid (empty) token.
		return "", fmt.Errorf("unsupported cc tx type:%v", txPb.Type)
	}
	return string(jwtBuf), nil
}
// CheckFreeze reports whether the given sender address has been frozen via
// the access-control contract: the PDX state entry keyed by the hash of the
// address bytes equals "1".
func CheckFreeze(tx *types.Transaction, stateDB vm.StateDB, from common.Address) bool {
	fromHash := util.EthHash(from.Bytes())
	state := stateDB.GetPDXState(utils.AccessCtlContract, fromHash)
	if len(state) == 0 || string(state) != "1" {
		return false
	}
	log.Trace("string(r) == 1")
	return true
}
// CheckToken validates the JWT attached to a transaction against the
// consortium's DappAuth/UserAuth configuration.
//
// Role matrix (unchanged from the original four-branch version):
//
//	target = AccessCtlContract        -> role "a"
//	DappAuth:T UserAuth:T  deploy "d", regular "u/d"
//	DappAuth:F UserAuth:T  deploy and regular both "u/d"
//	DappAuth:T UserAuth:F  deploy "d", regular unchecked
//	DappAuth:F UserAuth:F  nothing checked
func CheckToken(tx *types.Transaction, stateDB vm.StateDB) bool {
	// Calls on the access-control contract itself always require an admin
	// ("a") token.
	if tx.To() != nil && *tx.To() == utils.AccessCtlContract {
		log.Trace("access ctl token verify")
		tokenString, err := ParseToken(tx, stateDB)
		if err != nil {
			log.Error("parse token", "err", err)
			return false
		}
		return checkJWT(tokenString, "a")
	}
	// A deployment is a contract-creation tx or one aimed at the baap-deploy
	// contract.
	isDeploy := tx.To() == nil || *tx.To() == utils.CCBaapDeploy
	dappAuth := utopia.ConsortiumConf.DappAuth
	userAuth := utopia.ConsortiumConf.UserAuth
	switch {
	case userAuth:
		// With user auth on, every tx needs a token. Deployments need "d"
		// only when dapp auth is also on; the original code had two copies of
		// this branch whose !DappAuth arms both required "u/d".
		tokenString, err := ParseToken(tx, stateDB)
		if err != nil {
			log.Error("parse token", "err", err)
			return false
		}
		role := "u/d"
		if dappAuth && isDeploy {
			role = "d"
		}
		if !checkJWT(tokenString, role) {
			return false
		}
	case dappAuth && isDeploy:
		// DappAuth only: just deployments are checked, with role "d".
		tokenString, err := ParseToken(tx, stateDB)
		if err != nil {
			log.Error("parse token", "err", err)
			return false
		}
		if !checkJWT(tokenString, "d") {
			return false
		}
	}
	// Neither auth flag applies to this tx: nothing to verify.
	return true
}
// checkJWT verifies an ES256-signed JWT and checks that its claims satisfy
// the given role requirement. roleLimit "d" or "a" demands that exact role;
// any other value (e.g. "u/d") accepts role "u" or "d". The token's "l"
// claim is a block-count lifetime enforced via checkLimit, and its "ak"
// claim must be a registered user-certificate public key used to verify the
// signature.
func checkJWT(tokenString string, roleLimit string) (success bool) {
	if tokenString == "" {
		log.Error("tokenString empty")
		return false
	}
	// The keyfunc both validates the header/claims shape and supplies the
	// ECDSA public key (decompressed from the "ak" claim) for signature
	// verification.
	token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) {
		// Don't forget to validate the alg is what you expect:
		if _, ok := token.Method.(*jwt.SigningMethodECDSA); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"])
		}
		if token.Header["alg"] != "ES256" {
			return nil, fmt.Errorf("invalid signing alg:%v, only ES256 is prefered", token.Header["alg"])
		}
		claims, ok := token.Claims.(jwt.MapClaims)
		if !ok {
			return nil, fmt.Errorf("token claims type error")
		}
		ak, ok := claims["ak"]
		if !ok {
			return nil, fmt.Errorf("ak no exist in claims")
		}
		hexPubKey, ok := ak.(string)
		if !ok || len(hexPubKey) != vm.PUBK_HEX_LEN {
			return nil, fmt.Errorf("ak format error")
		}
		//check public key
		// Only keys registered in the consortium's user-cert map are trusted.
		_, ok = utopia.UserCertPublicKeyMap[hexPubKey]
		if !ok {
			return nil, fmt.Errorf("ak no exist in user cert public key")
		}
		return crypto.DecompressPubkey(common.Hex2Bytes(hexPubKey))
	})
	if err != nil {
		log.Error("jwt parse", "err", err)
		return false
	}
	if claims, success := token.Claims.(jwt.MapClaims); success && token.Valid {
		// "l": token lifetime in blocks (JSON numbers decode as float64).
		limit, success := claims["l"].(float64)
		if !success {
			log.Error("l not correct")
			return false
		}
		if !checkLimit(tokenString, int64(limit)) {
			log.Error("check limit fail")
			return false
		}
		// "r": the role granted by the token.
		role, success := claims["r"]
		if !success {
			log.Error("role no match", "role", role, "ok", success)
			return false
		}
		if roleLimit == "d" || roleLimit == "a" {
			// Exact role required for deploy/admin checks.
			if role != roleLimit {
				log.Error("role no auth", "role", role, "roleLimit", roleLimit)
				return false
			}
		} else {
			// Any of "u" or "d" suffices for regular transactions.
			if role == "u" || role == "d" {
			} else {
				log.Error("role no exist", "role", role)
				return false
			}
		}
	} else {
		log.Error("token invalid")
		return false
	}
	return true
}
// checkLimit enforces a token's block-height lifetime. The first time a
// token is seen, its expiry height (current block + limit) is persisted in
// the chain database; on later uses the token is rejected once the chain has
// advanced past that stored height.
func checkLimit(tokenString string, limit int64) bool {
	db := *ethdb.ChainDb
	tokenHash := util.EthHash([]byte(tokenString))
	has, err := db.Has(tokenHash.Bytes())
	if err != nil {
		log.Error("db has", "err", err)
		return false
	}
	currentBlockNum := public.BC.CurrentBlock().Number()
	if !has {
		// First use: record the height at which this token expires.
		expiry := big.NewInt(0).Add(big.NewInt(limit), currentBlockNum)
		if err := db.Put(tokenHash.Bytes(), expiry.Bytes()); err != nil {
			log.Error("db put tokenHash", "err", err)
			return false
		}
		return true
	}
	raw, err := db.Get(tokenHash.Bytes())
	if err != nil {
		log.Error("db get tokenHash", "err", err)
		return false
	}
	expiry := big.NewInt(0).SetBytes(raw)
	if currentBlockNum.Cmp(expiry) > 0 {
		log.Error("out of limit", "currentBlockNum", currentBlockNum.String(), "expiredBlockNum", expiry)
		return false
	}
	return true
}
Accelerate Your Automation Test Cycles With LambdaTest
Leverage LambdaTest’s cloud-based platform to execute your automation tests in parallel and trim down your test execution time significantly. Your first 100 automation testing minutes are on us.