How to use the tokenizeString method of the gop package

Best Got code snippet using gop.tokenizeString

Run Got automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

token.go

Source: token.go Github

copy
1package gop
2
3import (
4	"encoding/base64"
5	"encoding/json"
6	"reflect"
7	"sort"
8	"strconv"
9	"strings"
10	"time"
11	"unicode"
12	"unicode/utf8"
13)
14
var (
	// LongStringLen is the minimum length at which a string is treated as a
	// long string and gets a trailing length comment.
	LongStringLen = 16

	// LongBytesLen is the minimum length at which a byte slice is treated as
	// long bytes and gets a trailing length comment.
	LongBytesLen = 16
)
20
// Type identifies the kind of a Token.
type Type int

// All token types emitted by Tokenize. The declaration order (and thus the
// iota values) must not change, as it defines the wire values of Type.
const (
	Nil     Type = iota // Nil type
	Bool                // Bool type
	Number              // Number type
	Float               // Float type
	Complex             // Complex type
	String              // String type
	Byte                // Byte type
	Rune                // Rune type
	Chan                // Chan type
	Func                // Func type
	Error               // Error type

	Comment // Comment type

	TypeName // TypeName type

	ParenOpen  // ParenOpen type
	ParenClose // ParenClose type

	Dot // Dot type
	And // And type

	SliceOpen   // SliceOpen type
	SliceItem   // SliceItem type
	InlineComma // InlineComma type
	Comma       // Comma type
	SliceClose  // SliceClose type

	MapOpen  // MapOpen type
	MapKey   // MapKey type
	Colon    // Colon type
	MapClose // MapClose type

	StructOpen  // StructOpen type
	StructKey   // StructKey type
	StructField // StructField type
	StructClose // StructClose type
)

// Token represents a single symbol in the layout of a value.
type Token struct {
	Type    Type   // kind of the token
	Literal string // textual content of the token
}
99
100// Tokenize a random Go value
101func Tokenize(v interface{}) []*Token {
102	return tokenize(seen{}, []interface{}{}, reflect.ValueOf(v))
103}
104
105type path []interface{}
106
107func (p path) tokens() []*Token {
108	sn := seen{}
109	ts := []*Token{}
110	for i, seg := range p {
111		ts = append(ts, tokenize(sn, []interface{}{}, reflect.ValueOf(seg))...)
112		if i < len(p)-1 {
113			ts = append(ts, &Token{InlineComma, ","})
114		}
115	}
116	return ts
117}
118
119func (p path) has(prefix path) bool {
120	for i := range prefix {
121		if !reflect.DeepEqual(prefix[i], p[i]) {
122			return false
123		}
124	}
125	return true
126}
127
128type seen map[uintptr]path
129
130func (sn seen) circular(p path, v reflect.Value) []*Token {
131	switch v.Kind() {
132	case reflect.Ptr, reflect.Map, reflect.Slice:
133		ptr := v.Pointer()
134		if ptr == 0 {
135			return nil
136		}
137
138		if prev, has := sn[ptr]; has && p.has(prev) {
139			ts := []*Token{{Func, SymbolCircular}, {ParenOpen, "("}}
140			ts = append(ts, prev.tokens()...)
141			return append(ts, &Token{ParenClose, ")"}, &Token{Dot, "."},
142				&Token{ParenOpen, "("}, typeName(v.Type().String()), &Token{ParenClose, ")"})
143		}
144		sn[ptr] = p
145	}
146
147	return nil
148}
149
// tokenize converts a reflect.Value into its token representation. sn holds
// the pointers already visited and p the access path from the root value;
// together they drive circular-reference detection.
func tokenize(sn seen, p path, v reflect.Value) []*Token {
	// Special renderings (invalid/nil, graphic runes, bytes, time.Time,
	// time.Duration, JSON-looking strings or byte slices) take priority.
	if ts, has := tokenizeSpecial(v); has {
		return ts
	}

	// A non-nil result means v was already seen on an ancestor path; emit a
	// circular-reference marker instead of recursing forever.
	if ts := sn.circular(p, v); ts != nil {
		return ts
	}

	t := &Token{Nil, ""}

	switch v.Kind() {
	case reflect.Interface:
		// Unwrap the interface and tokenize its dynamic value.
		return tokenize(sn, p, v.Elem())

	case reflect.Bool:
		t.Type = Bool
		if v.Bool() {
			t.Literal = "true"
		} else {
			t.Literal = "false"
		}

	case reflect.String:
		return tokenizeString(v)

	case reflect.Chan:
		// Channels render as make(chan T[, cap]) with the channel's pointer
		// in a trailing comment.
		if v.Cap() == 0 {
			return []*Token{{Func, "make"}, {ParenOpen, "("},
				{Chan, "chan"}, typeName(v.Type().Elem().String()), {ParenClose, ")"},
				{Comment, wrapComment(formatUintptr(v.Pointer()))}}
		}
		return []*Token{{Func, "make"}, {ParenOpen, "("}, {Chan, "chan"},
			typeName(v.Type().Elem().String()), {InlineComma, ","},
			{Number, strconv.FormatInt(int64(v.Cap()), 10)}, {ParenClose, ")"},
			{Comment, wrapComment(formatUintptr(v.Pointer()))}}

	case reflect.Func:
		// Functions render as (T)(nil) with the function's pointer in a
		// trailing comment.
		return []*Token{{ParenOpen, "("}, {TypeName, v.Type().String()},
			{ParenClose, ")"}, {ParenOpen, "("}, {Nil, "nil"}, {ParenClose, ")"},
			{Comment, wrapComment(formatUintptr(v.Pointer()))}}

	case reflect.Ptr:
		return tokenizePtr(sn, p, v)

	case reflect.UnsafePointer:
		// Renders as unsafe.Pointer(uintptr(0x...)).
		return []*Token{typeName("unsafe.Pointer"), {ParenOpen, "("}, typeName("uintptr"),
			{ParenOpen, "("}, {Number, formatUintptr(v.Pointer())}, {ParenClose, ")"}, {ParenClose, ")"}}

	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
		reflect.Float32, reflect.Float64,
		reflect.Uintptr, reflect.Complex64, reflect.Complex128:
		return tokenizeNumber(v)

	case reflect.Slice, reflect.Array, reflect.Map, reflect.Struct:
		return tokenizeCollection(sn, p, v)
	}

	// Only the zero Token (Nil) and Bool fall through to here.
	return []*Token{t}
}
211
212func tokenizeSpecial(v reflect.Value) ([]*Token, bool) {
213	if v.Kind() == reflect.Invalid {
214		return []*Token{{Nil, "nil"}}, true
215	} else if r, ok := v.Interface().(rune); ok && unicode.IsGraphic(r) {
216		return []*Token{{Rune, strconv.QuoteRune(r)}}, true
217	} else if b, ok := v.Interface().(byte); ok {
218		return tokenizeByte(&Token{Nil, ""}, b), true
219	} else if t, ok := v.Interface().(time.Time); ok {
220		return tokenizeTime(t), true
221	} else if d, ok := v.Interface().(time.Duration); ok {
222		return tokenizeDuration(d), true
223	}
224
225	return tokenizeJSON(v)
226}
227
228func tokenizeCollection(sn seen, p path, v reflect.Value) []*Token {
229	ts := []*Token{}
230
231	switch v.Kind() {
232	case reflect.Slice, reflect.Array:
233		if data, ok := v.Interface().([]byte); ok {
234			ts = append(ts, tokenizeBytes(data)...)
235			break
236		} else {
237			ts = append(ts, typeName(v.Type().String()))
238		}
239		if v.Kind() == reflect.Slice && v.Cap() > 0 {
240			ts = append(ts, &Token{Comment, formatLenCap(v.Len(), v.Cap())})
241		}
242		ts = append(ts, &Token{SliceOpen, "{"})
243		for i := 0; i < v.Len(); i++ {
244			p := append(p, i)
245			el := v.Index(i)
246			ts = append(ts, &Token{SliceItem, ""})
247			ts = append(ts, tokenize(sn, p, el)...)
248			ts = append(ts, &Token{Comma, ","})
249		}
250		ts = append(ts, &Token{SliceClose, "}"})
251
252	case reflect.Map:
253		ts = append(ts, typeName(v.Type().String()))
254		keys := v.MapKeys()
255		sort.Slice(keys, func(i, j int) bool {
256			return compare(keys[i].Interface(), keys[j].Interface()) < 0
257		})
258		if len(keys) > 1 {
259			ts = append(ts, &Token{Comment, formatLenCap(len(keys), -1)})
260		}
261		ts = append(ts, &Token{MapOpen, "{"})
262		for _, k := range keys {
263			p := append(p, k.Interface())
264			ts = append(ts, &Token{MapKey, ""})
265			ts = append(ts, tokenize(sn, p, k)...)
266			ts = append(ts, &Token{Colon, ":"})
267			ts = append(ts, tokenize(sn, p, v.MapIndex(k))...)
268			ts = append(ts, &Token{Comma, ","})
269		}
270		ts = append(ts, &Token{MapClose, "}"})
271
272	case reflect.Struct:
273		t := v.Type()
274
275		ts = append(ts, typeName(t.String()))
276		ts = append(ts, &Token{StructOpen, "{"})
277		for i := 0; i < v.NumField(); i++ {
278			name := t.Field(i).Name
279			ts = append(ts, &Token{StructKey, ""})
280			ts = append(ts, &Token{StructField, name})
281
282			f := v.Field(i)
283			if !f.CanInterface() {
284				f = GetPrivateField(v, i)
285			}
286			ts = append(ts, &Token{Colon, ":"})
287			ts = append(ts, tokenize(sn, append(p, name), f)...)
288			ts = append(ts, &Token{Comma, ","})
289		}
290		ts = append(ts, &Token{StructClose, "}"})
291	}
292
293	return ts
294}
295
296func tokenizeNumber(v reflect.Value) []*Token {
297	t := &Token{Nil, ""}
298	ts := []*Token{}
299	tname := v.Type().String()
300
301	switch v.Kind() {
302	case reflect.Int:
303		t.Type = Number
304		t.Literal = strconv.FormatInt(v.Int(), 10)
305		if tname != "int" {
306			ts = append(ts, typeName(tname), &Token{ParenOpen, "("}, t, &Token{ParenClose, ")"})
307		} else {
308			ts = append(ts, t)
309		}
310
311	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
312		ts = append(ts, typeName(tname), &Token{ParenOpen, "("})
313		t.Type = Number
314		t.Literal = strconv.FormatInt(v.Int(), 10)
315		ts = append(ts, t, &Token{ParenClose, ")"})
316
317	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
318		ts = append(ts, typeName(tname), &Token{ParenOpen, "("})
319		t.Type = Number
320		t.Literal = strconv.FormatUint(v.Uint(), 10)
321		ts = append(ts, t, &Token{ParenClose, ")"})
322
323	case reflect.Float32:
324		ts = append(ts, typeName(tname), &Token{ParenOpen, "("})
325		t.Type = Number
326		t.Literal = strconv.FormatFloat(v.Float(), 'f', -1, 32)
327		ts = append(ts, t, &Token{ParenClose, ")"})
328
329	case reflect.Float64:
330		t.Type = Number
331		t.Literal = strconv.FormatFloat(v.Float(), 'f', -1, 64)
332		if !strings.Contains(t.Literal, ".") {
333			t.Literal += ".0"
334		}
335		if tname != "float64" {
336			ts = append(ts, typeName(tname), &Token{ParenOpen, "("}, t, &Token{ParenClose, ")"})
337		} else {
338			ts = append(ts, t)
339		}
340
341	case reflect.Complex64:
342		ts = append(ts, typeName(tname), &Token{ParenOpen, "("})
343		t.Type = Number
344		t.Literal = strconv.FormatComplex(v.Complex(), 'f', -1, 64)
345		t.Literal = t.Literal[1 : len(t.Literal)-1]
346		ts = append(ts, t, &Token{ParenClose, ")"})
347
348	case reflect.Complex128:
349		t.Type = Number
350		t.Literal = strconv.FormatComplex(v.Complex(), 'f', -1, 128)
351		t.Literal = t.Literal[1 : len(t.Literal)-1]
352		if tname != "complex128" {
353			ts = append(ts, typeName(tname), &Token{ParenOpen, "("}, t, &Token{ParenClose, ")"})
354		} else {
355			ts = append(ts, t)
356		}
357
358	}
359
360	return ts
361}
362
363func tokenizeByte(t *Token, b byte) []*Token {
364	ts := []*Token{typeName("byte"), {ParenOpen, "("}}
365	r := rune(b)
366	if unicode.IsGraphic(r) {
367		ts = append(ts, &Token{Byte, strconv.QuoteRune(r)})
368	} else {
369		ts = append(ts, &Token{Byte, "0x" + strconv.FormatUint(uint64(b), 16)})
370	}
371	return append(ts, &Token{ParenClose, ")"})
372}
373
374func tokenizeTime(t time.Time) []*Token {
375	ext := GetPrivateFieldByName(reflect.ValueOf(t), "ext").Int()
376	ts := []*Token{{Func, SymbolTime}, {ParenOpen, "("}}
377	ts = append(ts, &Token{String, t.Format(time.RFC3339Nano)})
378	ts = append(ts, &Token{InlineComma, ","}, &Token{Number, strconv.FormatInt(ext, 10)}, &Token{ParenClose, ")"})
379	return ts
380}
381
382func tokenizeDuration(d time.Duration) []*Token {
383	ts := []*Token{}
384	ts = append(ts, typeName(SymbolDuration), &Token{ParenOpen, "("})
385	ts = append(ts, &Token{String, d.String()})
386	ts = append(ts, &Token{ParenClose, ")"})
387	return ts
388}
389
390func tokenizeString(v reflect.Value) []*Token {
391	s := v.String()
392	ts := []*Token{{String, s}}
393	if v.Len() >= LongStringLen {
394		ts = append(ts, &Token{Comment, formatLenCap(len(s), -1)})
395	}
396	return ts
397}
398
399func tokenizeBytes(data []byte) []*Token {
400	ts := []*Token{}
401
402	if utf8.Valid(data) {
403		s := string(data)
404		ts = append(ts, typeName("[]byte"), &Token{ParenOpen, "("})
405		ts = append(ts, &Token{String, s})
406		ts = append(ts, &Token{ParenClose, ")"})
407	} else {
408		ts = append(ts, &Token{Func, SymbolBase64}, &Token{ParenOpen, "("})
409		ts = append(ts, &Token{String, base64.StdEncoding.EncodeToString(data)})
410		ts = append(ts, &Token{ParenClose, ")"})
411	}
412	if len(data) >= LongBytesLen {
413		ts = append(ts, &Token{Comment, formatLenCap(len(data), -1)})
414	}
415	return ts
416}
417
418func tokenizePtr(sn seen, p path, v reflect.Value) []*Token {
419	ts := []*Token{}
420
421	if v.Elem().Kind() == reflect.Invalid {
422		ts = append(ts,
423			&Token{ParenOpen, "("}, typeName(v.Type().String()), &Token{ParenClose, ")"},
424			&Token{ParenOpen, "("}, &Token{Nil, "nil"}, &Token{ParenClose, ")"})
425		return ts
426	}
427
428	fn := false
429
430	switch v.Elem().Kind() {
431	case reflect.Struct, reflect.Map, reflect.Slice, reflect.Array:
432		if _, ok := v.Elem().Interface().([]byte); ok {
433			fn = true
434		}
435	default:
436		fn = true
437	}
438
439	if fn {
440		ts = append(ts, &Token{Func, SymbolPtr}, &Token{ParenOpen, "("})
441		ts = append(ts, tokenize(sn, p, v.Elem())...)
442		ts = append(ts, &Token{ParenClose, ")"}, &Token{Dot, "."}, &Token{ParenOpen, "("},
443			typeName(v.Type().String()), &Token{ParenClose, ")"})
444	} else {
445		ts = append(ts, &Token{And, "&"})
446		ts = append(ts, tokenize(sn, p, v.Elem())...)
447	}
448
449	return ts
450}
451
452func tokenizeJSON(v reflect.Value) ([]*Token, bool) {
453	var jv interface{}
454	ts := []*Token{}
455	s := ""
456	if v.Kind() == reflect.String {
457		s = v.String()
458		err := json.Unmarshal([]byte(s), &jv)
459		if err != nil {
460			return nil, false
461		}
462		ts = append(ts, &Token{Func, SymbolJSONStr})
463	} else if b, ok := v.Interface().([]byte); ok {
464		err := json.Unmarshal(b, &jv)
465		if err != nil {
466			return nil, false
467		}
468		s = string(b)
469		ts = append(ts, &Token{Func, SymbolJSONBytes})
470	}
471
472	_, isObj := jv.(map[string]interface{})
473	_, isArr := jv.(map[string]interface{})
474
475	if isObj || isArr {
476		ts = append(ts, &Token{ParenOpen, "("})
477		ts = append(ts, Tokenize(jv)...)
478		ts = append(ts, &Token{InlineComma, ","},
479			&Token{String, s}, &Token{ParenClose, ")"})
480		return ts, true
481	}
482
483	return nil, false
484}
485
486func typeName(t string) *Token {
487	return &Token{TypeName, t}
488}
489
Full Screen

Accelerate Your Automation Test Cycles With LambdaTest

Leverage LambdaTest’s cloud-based platform to execute your automation tests in parallel and trim down your test execution time significantly. Your first 100 automation testing minutes are on us.

Try LambdaTest
LambdaTestX

We use cookies to give you the best experience. Cookies help to provide a more personalized experience and relevant advertising for you, and web analytics for us. Learn More in our Cookies policy, Privacy & Terms of service

Allow Cookie
Sarah

I hope you find the best code examples for your project.

If you want to accelerate automated browser testing, try LambdaTest. Your first 100 automation testing minutes are FREE.

Sarah Elson (Product & Growth Lead)