package dbus

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)
// Heavily inspired by the lexer from text/template.
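// varToken is a single lexical token produced by varLex.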
type varToken struct {
	typ varTokenType
	val string
}
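// varTokenType identifies the kind of a varToken.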
type varTokenType byte
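// The complete set of token kinds emitted by the lexer.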
const (
	tokEOF varTokenType = iota
	tokError
	tokNumber
	tokString
	tokBool
	tokArrayStart
	tokArrayEnd
	tokDictStart
	tokDictEnd
	tokVariantStart
	tokVariantEnd
	tokComma
	tokColon
	tokType
	tokByteString
)
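// varLexer holds the state of the scanner.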
type varLexer struct {
	input  string     // string being scanned
	start  int        // start position of the current token
	pos    int        // current read position in the input
	width  int        // width of the last rune read by next
	tokens []varToken // tokens emitted so far
}
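// lexState is a state function: it scans part of the input and returns the
// next state, or nil when lexing is finished.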
type lexState func(*varLexer) lexState
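// varLex tokenizes s and returns the emitted tokens; the last token is either
// tokEOF or tokError. As a rough example, varLex("@au [1, 2, 3]") should yield
// tokType "@au", tokArrayStart "[", tokNumber "1", tokComma ",", tokNumber "2",
// tokComma ",", tokNumber "3", tokArrayEnd "]" and a final tokEOF.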
func varLex(s string) []varToken {
	l := &varLexer{input: s}
	l.run()
	return l.tokens
}
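// accept consumes the next rune if it is contained in valid and reports
// whether it did so.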
func (l *varLexer) accept(valid string) bool {
	if strings.IndexRune(valid, l.next()) >= 0 {
		return true
	}
	l.backup()
	return false
}
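// backup steps back one rune; it may only be called once per call of next.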
func (l *varLexer) backup() {
	l.pos -= l.width
}
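// emit appends the input scanned since the last emit or ignore as a token of
// type t.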
func (l *varLexer) emit(t varTokenType) {
	l.tokens = append(l.tokens, varToken{t, l.input[l.start:l.pos]})
	l.start = l.pos
}
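// errorf appends an error token with the formatted message and returns nil to
// terminate the scan.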
func (l *varLexer) errorf(format string, v ...interface{}) lexState {
	l.tokens = append(l.tokens, varToken{
		tokError,
		fmt.Sprintf(format, v...),
	})
	return nil
}
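// ignore discards the input scanned so far.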
func (l *varLexer) ignore() {
	l.start = l.pos
}
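// next returns the next rune in the input, or -1 once the input is exhausted.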
func (l *varLexer) next() rune {
	var r rune
	if l.pos >= len(l.input) {
		l.width = 0
		return -1
	}
	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
	l.pos += l.width
	return r
}
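// run drives the state machine, starting in varLexNormal, until a state
// function returns nil.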
func (l *varLexer) run() {
	for state := varLexNormal; state != nil; {
		state = state(l)
	}
}
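// peek returns but does not consume the next rune in the input.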
func (l *varLexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}
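// varLexNormal is the top-level state: it emits single-character tokens
// directly and dispatches to the string, byte-string, number and type states
// based on the next rune.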
func varLexNormal(l *varLexer) lexState {
	for {
		r := l.next()
		switch {
		case r == -1:
			l.emit(tokEOF)
			return nil
		case r == '[':
			l.emit(tokArrayStart)
		case r == ']':
			l.emit(tokArrayEnd)
		case r == '{':
			l.emit(tokDictStart)
		case r == '}':
			l.emit(tokDictEnd)
		case r == '<':
			l.emit(tokVariantStart)
		case r == '>':
			l.emit(tokVariantEnd)
		case r == ':':
			l.emit(tokColon)
		case r == ',':
			l.emit(tokComma)
		case r == '\'' || r == '"':
			l.backup()
			return varLexString
		case r == '@':
			l.backup()
			return varLexType
		case unicode.IsSpace(r):
			l.ignore()
		case unicode.IsNumber(r) || r == '+' || r == '-':
			l.backup()
			return varLexNumber
		case r == 'b':
			pos := l.start
			if n := l.peek(); n == '"' || n == '\'' {
				return varLexByteString
			}
			// not a byte string; try to parse it as a type or bool below
			l.pos = pos + 1
			l.width = 1
			fallthrough
		default:
			// either a bool or a type. Try bools first.
			l.backup()
			if l.pos+4 <= len(l.input) {
				if l.input[l.pos:l.pos+4] == "true" {
					l.pos += 4
					l.emit(tokBool)
					continue
				}
			}
			if l.pos+5 <= len(l.input) {
				if l.input[l.pos:l.pos+5] == "false" {
					l.pos += 5
					l.emit(tokBool)
					continue
				}
			}
			// must be a type.
			return varLexType
		}
	}
}
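// varTypeMap maps the bare type keywords accepted by the lexer to the
// corresponding D-Bus signature characters.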
var varTypeMap = map[string]string{
	"boolean":    "b",
	"byte":       "y",
	"int16":      "n",
	"uint16":     "q",
	"int32":      "i",
	"uint32":     "u",
	"int64":      "x",
	"uint64":     "t",
	"double":     "d",
	"string":     "s",
	"objectpath": "o",
	"signature":  "g",
}
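// varLexByteString scans a quoted byte string such as b'...' or b"..."; the
// leading 'b' has already been consumed and the opening quote determines the
// closing one.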
func varLexByteString(l *varLexer) lexState {
	q := l.next()
Loop:
	for {
		switch l.next() {
		case '\\':
			// skip the escaped rune; fall through to the error only at EOF
			if r := l.next(); r != -1 {
				break
			}
			fallthrough
		case -1:
			return l.errorf("unterminated bytestring")
		case q:
			break Loop
		}
	}
	l.emit(tokByteString)
	return varLexNormal
}
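// varLexNumber scans an integer or floating-point literal: an optional sign,
// a 0x or 0 prefix for hexadecimal or octal integers, an optional decimal
// fraction and an optional exponent.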
func varLexNumber(l *varLexer) lexState {
	l.accept("+-")
	digits := "0123456789"
	if l.accept("0") {
		if l.accept("x") {
			digits = "0123456789abcdefABCDEF"
		} else {
			digits = "01234567"
		}
	}
	for strings.IndexRune(digits, l.next()) >= 0 {
	}
	l.backup()
	if l.accept(".") {
		for strings.IndexRune(digits, l.next()) >= 0 {
		}
		l.backup()
	}
	if l.accept("eE") {
		l.accept("+-")
		for strings.IndexRune("0123456789", l.next()) >= 0 {
		}
		l.backup()
	}
	if r := l.peek(); unicode.IsLetter(r) {
		l.next()
		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
	}
	l.emit(tokNumber)
	return varLexNormal
}
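// varLexString scans a single- or double-quoted string literal; the opening
// quote determines the closing one.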
func varLexString(l *varLexer) lexState {
	q := l.next()
Loop:
	for {
		switch l.next() {
		case '\\':
			// skip the escaped rune; fall through to the error only at EOF
			if r := l.next(); r != -1 {
				break
			}
			fallthrough
		case -1:
			return l.errorf("unterminated string")
		case q:
			break Loop
		}
	}
	l.emit(tokString)
	return varLexNormal
}
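// varLexType scans a type annotation up to the next space: either an
// @-prefixed D-Bus signature, which is validated with ParseSignature, or one
// of the bare keywords in varTypeMap.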
func varLexType(l *varLexer) lexState {
	at := l.accept("@")
	for {
		r := l.next()
		if r == -1 {
			break
		}
		if unicode.IsSpace(r) {
			l.backup()
			break
		}
	}
	if at {
		if _, err := ParseSignature(l.input[l.start+1 : l.pos]); err != nil {
			return l.errorf("%s", err)
		}
	} else {
		if _, ok := varTypeMap[l.input[l.start:l.pos]]; ok {
			l.emit(tokType)
			return varLexNormal
		}
		return l.errorf("unrecognized type %q", l.input[l.start:l.pos])
	}
	l.emit(tokType)
	return varLexNormal
}