lexer.go
package kingpin

import "strings"
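
// TokenType identifies the kind of token produced by the lexer.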
type TokenType int

// Token types.
const (
	TokenShort TokenType = iota
	TokenLong
	TokenArg
	TokenEOL
)
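
// TokenEOLMarker is the sentinel token returned by Peek when no tokens remain.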
var (
	TokenEOLMarker = Token{TokenEOL, ""}
)
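
// Token is a single lexed command-line element: a short flag, a long flag,
// an argument, or the end-of-line sentinel.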
type Token struct {
	Type  TokenType
	Value string
}
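
// IsFlag reports whether the token is a short or long flag.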
func (t *Token) IsFlag() bool {
	return t.Type == TokenShort || t.Type == TokenLong
}
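
// IsEOF reports whether the token is the end-of-line sentinel.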
func (t *Token) IsEOF() bool {
	return t.Type == TokenEOL
}
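
// String renders the token approximately as it appeared on the command line.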
func (t *Token) String() string {
	switch t.Type {
	case TokenShort:
		return "-" + t.Value
	case TokenLong:
		return "--" + t.Value
	case TokenArg:
		return t.Value
	case TokenEOL:
		return "<EOL>"
	default:
		panic("unhandled type")
	}
}
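
// Tokens is an ordered sequence of lexed tokens, consumed as a stream via
// Peek, Next and Return.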
type Tokens []*Token
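
// String joins the string forms of all tokens with single spaces.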
func (t Tokens) String() string {
	out := []string{}
	for _, tok := range t {
		out = append(out, tok.String())
	}
	return strings.Join(out, " ")
}
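
// Next returns the tokens remaining after the first one, or nil if none remain.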
func (t Tokens) Next() Tokens {
	if len(t) == 0 {
		return nil
	}
	return t[1:]
}
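
// Return pushes a token back onto the front of the sequence. The EOL sentinel
// is never pushed back.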
func (t Tokens) Return(token *Token) Tokens {
	if token.Type == TokenEOL {
		return t
	}
	return append(Tokens{token}, t...)
}
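
// Peek returns the first token without consuming it, or TokenEOLMarker if the
// sequence is empty.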
func (t Tokens) Peek() *Token {
	if len(t) == 0 {
		return &TokenEOLMarker
	}
	return t[0]
}
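
// Tokenize lexes raw command-line arguments into a ParseContext. A "--flag"
// argument becomes a long-flag token, plus a trailing argument token when it
// carries an "=value" suffix; "-abc" expands to one short-flag token per
// character; anything else becomes a single argument token. For example,
// []string{"--name=foo", "-ab", "arg"} lexes to --name foo -a -b arg.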
func Tokenize(args []string) *ParseContext {
	tokens := make(Tokens, 0, len(args))
	for _, arg := range args {
		if strings.HasPrefix(arg, "--") {
			parts := strings.SplitN(arg[2:], "=", 2)
			tokens = append(tokens, &Token{TokenLong, parts[0]})
			if len(parts) == 2 {
				tokens = append(tokens, &Token{TokenArg, parts[1]})
			}
		} else if strings.HasPrefix(arg, "-") {
			for _, a := range arg[1:] {
				tokens = append(tokens, &Token{TokenShort, string(a)})
			}
		} else {
			tokens = append(tokens, &Token{TokenArg, arg})
		}
	}
	return &ParseContext{Tokens: tokens}
}
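
// Usage sketch (illustrative, not part of the original file): the argument
// list is assumed to be lexed up front and the resulting token stream walked
// with Peek/Next. ParseContext is defined elsewhere in the package; only its
// Tokens field, populated above, is used here.
//
//	ctx := Tokenize([]string{"--name=foo", "-ab", "arg"})
//	for tok := ctx.Tokens.Peek(); !tok.IsEOF(); tok = ctx.Tokens.Peek() {
//		ctx.Tokens = ctx.Tokens.Next()
//		fmt.Println(tok) // prints --name, foo, -a, -b, arg in turn
//	}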