-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathscanner_test.go
More file actions
110 lines (108 loc) · 2.47 KB
/
scanner_test.go
File metadata and controls
110 lines (108 loc) · 2.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
package sham
import (
"reflect"
"testing"
)
// TestTokenize exercises the scanner with a table of inputs: one case per
// broad token category (structural punctuation, literals, identifiers,
// fstrings, regexes) plus the error paths for unterminated and unknown
// tokens.
func TestTokenize(t *testing.T) {
	cases := []struct {
		desc      string
		input     []byte
		expect    []Token
		expectErr bool
	}{
		{
			desc:      "Empty input",
			input:     []byte(``),
			expect:    []Token{},
			expectErr: false,
		},
		{
			desc:  "Tokenize array of objects",
			input: []byte(`[{"a": 123, "b": true, "c": "def", "g": -1.56e-6, "h":name}] `),
			expect: []Token{
				{Type: TokLBracket, Value: "["},
				{Type: TokLBrace, Value: "{"},
				{Type: TokString, Value: "a"},
				{Type: TokColon, Value: ":"},
				{Type: TokInteger, Value: "123"},
				{Type: TokComma, Value: ","},
				{Type: TokString, Value: "b"},
				{Type: TokColon, Value: ":"},
				{Type: TokTrue, Value: "true"},
				{Type: TokComma, Value: ","},
				{Type: TokString, Value: "c"},
				{Type: TokColon, Value: ":"},
				{Type: TokString, Value: "def"},
				{Type: TokComma, Value: ","},
				{Type: TokString, Value: "g"},
				{Type: TokColon, Value: ":"},
				{Type: TokFloat, Value: "-1.56e-6"},
				{Type: TokComma, Value: ","},
				{Type: TokString, Value: "h"},
				{Type: TokColon, Value: ":"},
				{Type: TokIdent, Value: "name"},
				{Type: TokRBrace, Value: "}"},
				{Type: TokRBracket, Value: "]"},
			},
			expectErr: false,
		},
		{
			desc:  "Tokenize range",
			input: []byte(`(1, 2)`),
			expect: []Token{
				{Type: TokLParen, Value: "("},
				{Type: TokInteger, Value: "1"},
				{Type: TokComma, Value: ","},
				{Type: TokInteger, Value: "2"},
				{Type: TokRParen, Value: ")"},
			},
			expectErr: false,
		},
		{
			desc:  "Tokenize fstring",
			input: []byte("`foo ${bar}`"),
			expect: []Token{
				{Type: TokFString, Value: "foo ${bar}"},
			},
			expectErr: false,
		},
		{
			desc:  "Tokenize regex",
			input: []byte(`/^abc123.*\/(|)$/`),
			expect: []Token{
				{Type: TokRegex, Value: `^abc123.*\/(|)$`},
			},
			expectErr: false,
		},
		{
			desc:      "Unterminated string",
			input:     []byte(`"abc`),
			expect:    nil,
			expectErr: true,
		},
		{
			desc:      "Unterminated regex",
			input:     []byte(`/ab`),
			expect:    nil,
			expectErr: true,
		},
		{
			desc:      "Unknown token",
			input:     []byte(`{}>`),
			expect:    nil,
			expectErr: true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.desc, func(t *testing.T) {
			tokens, err := Tokenize(tc.input)
			// Error presence must match the expectation; when it does not,
			// comparing the token stream would only add noise, so bail out.
			if gotErr := err != nil; gotErr != tc.expectErr {
				t.Errorf("Tokenize() error = %v, wantErr %v", err, tc.expectErr)
				return
			}
			if !reflect.DeepEqual(tokens, tc.expect) {
				t.Errorf("Tokenize() = %v, want %v", tokens, tc.expect)
			}
		})
	}
}