package dbus

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// Heavily inspired by the lexer from text/template.

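// varToken is a single lexical token; val holds the raw input text the token
// was scanned from.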
type varToken struct {
	typ varTokenType
	val string
}

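// varTokenType identifies the kind of a varToken.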
type varTokenType byte

const (
	tokEOF varTokenType = iota
	tokError
	tokNumber
	tokString
	tokBool
	tokArrayStart
	tokArrayEnd
	tokDictStart
	tokDictEnd
	tokVariantStart
	tokVariantEnd
	tokComma
	tokColon
	tokType
	tokByteString
)

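// varLexer holds the scanner state: the string being scanned, the start of
// the token currently being scanned, the current read position, the width of
// the last rune read (so backup can undo one next), and the tokens emitted
// so far.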
type varLexer struct {
	input  string
	start  int
	pos    int
	width  int
	tokens []varToken
}

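// lexState represents one state of the lexer as a function that returns the
// next state; returning nil stops the run loop. This mirrors the
// state-function design of the text/template lexer.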
type lexState func(*varLexer) lexState

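// varLex scans s and returns the emitted tokens. The result always ends in
// either a tokEOF token or, if scanning failed, a tokError token whose val
// holds the error message. For example, varLex(`[1, "foo"]`) yields the
// tokens tokArrayStart, tokNumber, tokComma, tokString, tokArrayEnd, tokEOF.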
func varLex(s string) []varToken {
	l := &varLexer{input: s}
	l.run()
	return l.tokens
}

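// accept consumes the next rune if it is one of the runes in valid and
// reports whether it did.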
func (l *varLexer) accept(valid string) bool {
	if strings.IndexRune(valid, l.next()) >= 0 {
		return true
	}
	l.backup()
	return false
}

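// backup steps back one rune; it may only be called once per call of next.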
func (l *varLexer) backup() {
	l.pos -= l.width
}

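// emit appends a token of type t containing the input consumed since the
// last emit or ignore and marks that input as consumed.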
func (l *varLexer) emit(t varTokenType) {
	l.tokens = append(l.tokens, varToken{t, l.input[l.start:l.pos]})
	l.start = l.pos
}

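// errorf appends a tokError token with a formatted message and returns nil
// to stop the lexer.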
func (l *varLexer) errorf(format string, v ...interface{}) lexState {
	l.tokens = append(l.tokens, varToken{
		tokError,
		fmt.Sprintf(format, v...),
	})
	return nil
}

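// ignore discards the input consumed since the last token without emitting
// anything.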
func (l *varLexer) ignore() {
	l.start = l.pos
}

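// next consumes and returns the next rune of the input, or -1 once the input
// is exhausted.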
func (l *varLexer) next() rune {
	var r rune

	if l.pos >= len(l.input) {
		l.width = 0
		return -1
	}
	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
	l.pos += l.width
	return r
}

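// run executes the state machine, starting in varLexNormal, until a state
// function returns nil.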
func (l *varLexer) run() {
	for state := varLexNormal; state != nil; {
		state = state(l)
	}
}

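// peek returns the next rune without consuming it.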
func (l *varLexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}

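// varLexNormal is the start (and default) state. It emits single-rune tokens
// directly, skips whitespace, and dispatches to the dedicated states for
// strings, byte strings, numbers and types; bare words are checked against
// the literals true and false before falling back to varLexType.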
func varLexNormal(l *varLexer) lexState {
	for {
		r := l.next()
		switch {
		case r == -1:
			l.emit(tokEOF)
			return nil
		case r == '[':
			l.emit(tokArrayStart)
		case r == ']':
			l.emit(tokArrayEnd)
		case r == '{':
			l.emit(tokDictStart)
		case r == '}':
			l.emit(tokDictEnd)
		case r == '<':
			l.emit(tokVariantStart)
		case r == '>':
			l.emit(tokVariantEnd)
		case r == ':':
			l.emit(tokColon)
		case r == ',':
			l.emit(tokComma)
		case r == '\'' || r == '"':
			l.backup()
			return varLexString
		case r == '@':
			l.backup()
			return varLexType
		case unicode.IsSpace(r):
			l.ignore()
		case unicode.IsNumber(r) || r == '+' || r == '-':
			l.backup()
			return varLexNumber
		case r == 'b':
			pos := l.start
			if n := l.peek(); n == '"' || n == '\'' {
				return varLexByteString
			}
			// not a byte string; try to parse it as a type or bool below
			l.pos = pos + 1
			l.width = 1
			fallthrough
		default:
			// either a bool or a type. Try bools first.
			l.backup()
			if l.pos+4 <= len(l.input) {
				if l.input[l.pos:l.pos+4] == "true" {
					l.pos += 4
					l.emit(tokBool)
					continue
				}
			}
			if l.pos+5 <= len(l.input) {
				if l.input[l.pos:l.pos+5] == "false" {
					l.pos += 5
					l.emit(tokBool)
					continue
				}
			}
			// must be a type.
			return varLexType
		}
	}
}

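// varTypeMap maps the spelled-out type names recognized by varLexType to the
// corresponding D-Bus signature characters.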
var varTypeMap = map[string]string{
	"boolean":    "b",
	"byte":       "y",
	"int16":      "n",
	"uint16":     "q",
	"int32":      "i",
	"uint32":     "u",
	"int64":      "x",
	"uint64":     "t",
	"double":     "d",
	"string":     "s",
	"objectpath": "o",
	"signature":  "g",
}

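// varLexByteString scans a byte string such as b"..." or b'...'. The leading
// b has already been consumed; the emitted tokByteString includes the b and
// the quotes, and backslash escapes are skipped over rather than interpreted.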
func varLexByteString(l *varLexer) lexState {
	q := l.next()
Loop:
	for {
		switch l.next() {
		case '\\':
			if r := l.next(); r != -1 {
				break
			}
			fallthrough
		case -1:
			return l.errorf("unterminated bytestring")
		case q:
			break Loop
		}
	}
	l.emit(tokByteString)
	return varLexNormal
}

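// varLexNumber scans an optionally signed number: decimal, octal (leading 0)
// or hexadecimal (leading 0x), with an optional fraction and exponent. A
// letter immediately following the number is reported as a syntax error.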
func varLexNumber(l *varLexer) lexState {
	l.accept("+-")
	digits := "0123456789"
	if l.accept("0") {
		if l.accept("x") {
			digits = "0123456789abcdefABCDEF"
		} else {
			digits = "01234567"
		}
	}
	for strings.IndexRune(digits, l.next()) >= 0 {
	}
	l.backup()
	if l.accept(".") {
		for strings.IndexRune(digits, l.next()) >= 0 {
		}
		l.backup()
	}
	if l.accept("eE") {
		l.accept("+-")
		for strings.IndexRune("0123456789", l.next()) >= 0 {
		}
		l.backup()
	}
	if r := l.peek(); unicode.IsLetter(r) {
		l.next()
		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
	}
	l.emit(tokNumber)
	return varLexNormal
}

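// varLexString scans a single- or double-quoted string. The emitted
// tokString includes the quotes; backslash escapes are skipped over rather
// than interpreted.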
func varLexString(l *varLexer) lexState {
	q := l.next()
Loop:
	for {
		switch l.next() {
		case '\\':
			if r := l.next(); r != -1 {
				break
			}
			fallthrough
		case -1:
			return l.errorf("unterminated string")
		case q:
			break Loop
		}
	}
	l.emit(tokString)
	return varLexNormal
}

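// varLexType scans a type token up to the next space or end of input: either
// a full signature introduced by @ and validated with ParseSignature, or one
// of the plain type names listed in varTypeMap.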
func varLexType(l *varLexer) lexState {
	at := l.accept("@")
	for {
		r := l.next()
		if r == -1 {
			break
		}
		if unicode.IsSpace(r) {
			l.backup()
			break
		}
	}
	if at {
		if _, err := ParseSignature(l.input[l.start+1 : l.pos]); err != nil {
			return l.errorf("%s", err)
		}
	} else {
		if _, ok := varTypeMap[l.input[l.start:l.pos]]; ok {
			l.emit(tokType)
			return varLexNormal
		}
		return l.errorf("unrecognized type %q", l.input[l.start:l.pos])
	}
	l.emit(tokType)
	return varLexNormal
}