decode.go (forked from torbiak/gopl)
package sexpr
import (
"bytes"
"fmt"
"reflect"
"strconv"
"strings"
"text/scanner"
)
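// Interfaces maps a type name, as it appears in the ("name" value) encoding
// of an interface value, to the concrete reflect.Type that Unmarshal should
// decode it into. Callers register concrete types here before calling Unmarshal.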
var Interfaces map[string]reflect.Type
// Unmarshal parses S-expression data and populates the variable
// whose address is in the non-nil pointer out.
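//
// A minimal usage sketch (the Movie struct and its fields are hypothetical,
// not part of this package):
//
//	type Movie struct {
//		Title string
//		Year  int
//	}
//	var m Movie
//	if err := Unmarshal([]byte(`((Title "Dr. Strangelove") (Year 1964))`), &m); err != nil {
//		// handle error
//	}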
func Unmarshal(data []byte, out interface{}) (err error) {
lex := &lexer{scan: scanner.Scanner{Mode: scanner.GoTokens}}
lex.scan.Init(bytes.NewReader(data))
lex.next() // get the first token
defer func() {
// NOTE: this is not an example of ideal error handling.
if x := recover(); x != nil {
err = fmt.Errorf("error at %s: %v", lex.scan.Position, x)
}
}()
read(lex, reflect.ValueOf(out).Elem())
return nil
}
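// lexer wraps a text/scanner.Scanner and remembers the most recently scanned
// token so the parser can peek at it.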
type lexer struct {
scan scanner.Scanner
token rune // the current token
}
func (lex *lexer) next() { lex.token = lex.scan.Scan() }
func (lex *lexer) text() string { return lex.scan.TokenText() }
func (lex *lexer) consume(want rune) {
if lex.token != want { // NOTE: Not an example of good error handling.
panic(fmt.Sprintf("got %q, want %q", lex.text(), want))
}
lex.next()
}
// The read function is a decoder for a small subset of well-formed
// S-expressions. For brevity of our example, it takes many dubious
// shortcuts.
//
// The parser assumes
// - that the S-expression input is well-formed; it does no error checking.
// - that the S-expression input corresponds to the type of the variable.
// - that all numbers in the input are non-negative decimal integer or
// floating-point literals.
// - that all keys in ((key value) ...) struct syntax are unquoted symbols.
// - that the input does not contain dotted lists such as (1 2 . 3).
// - that the input does not contain Lisp reader macros such as 'x and #'x.
//
// The reflection logic assumes
// - that v is always a variable of the appropriate type for the
// S-expression value. For example, v must not be a channel or
// function, and decoding into an interface requires the concrete
// type to be registered in Interfaces. If v is an array, the input
// must have the correct number of elements.
// - that v in the top-level call to read has the zero value of its
// type and doesn't need clearing.
// - that if v is a numeric variable, it is a signed integer or a float;
// unsigned integers are not handled.
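//
// For example, a []int variable is decoded from input such as (1 2 3), and
// a map[string]int from (("a" 1) ("b" 2)).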
func read(lex *lexer, v reflect.Value) {
switch lex.token {
case scanner.Ident:
// The only valid identifiers are "nil", "t", and struct field names.
switch lex.text() {
case "nil":
v.Set(reflect.Zero(v.Type()))
lex.next()
return
case "t":
v.SetBool(true)
lex.next()
return
}
case scanner.String:
s, _ := strconv.Unquote(lex.text()) // NOTE: ignoring errors
v.SetString(s)
lex.next()
return
case scanner.Int:
i, _ := strconv.Atoi(lex.text()) // NOTE: ignoring errors
switch v.Type().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
v.SetInt(int64(i))
lex.next()
return
case reflect.Float32, reflect.Float64:
v.SetFloat(float64(i))
lex.next()
return
}
case scanner.Float:
f, _ := strconv.ParseFloat(lex.text(), 64) // NOTE: ignoring errors
v.SetFloat(f)
lex.next()
return
case '(':
lex.next()
readList(lex, v)
lex.next() // consume ')'
return
}
panic(fmt.Sprintf("unexpected token %q", lex.text()))
}
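// readList decodes the contents of a list, whose opening '(' has already been
// consumed by read, into v according to v's kind. It stops at the closing ')'
// but leaves it for the caller to consume.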
func readList(lex *lexer, v reflect.Value) {
switch v.Kind() {
case reflect.Array: // (item ...)
for i := 0; !endList(lex); i++ {
read(lex, v.Index(i))
}
case reflect.Slice: // (item ...)
for !endList(lex) {
item := reflect.New(v.Type().Elem()).Elem()
read(lex, item)
v.Set(reflect.Append(v, item))
}
case reflect.Struct: // ((name value) ...)
for !endList(lex) {
lex.consume('(')
if lex.token != scanner.Ident {
panic(fmt.Sprintf("got token %q, want field name", lex.text()))
}
name := lex.text()
lex.next()
read(lex, v.FieldByName(name))
lex.consume(')')
}
case reflect.Map: // ((key value) ...)
v.Set(reflect.MakeMap(v.Type()))
for !endList(lex) {
lex.consume('(')
key := reflect.New(v.Type().Key()).Elem()
read(lex, key)
value := reflect.New(v.Type().Elem()).Elem()
read(lex, value)
v.SetMapIndex(key, value)
lex.consume(')')
}
case reflect.Interface: // (name value)
name := strings.Trim(lex.text(), `"`)
lex.next()
typ, ok := Interfaces[name]
if !ok {
panic(fmt.Sprintf("no concrete type registered under name %q", name))
}
val := reflect.New(typ)
read(lex, reflect.Indirect(val))
v.Set(reflect.Indirect(val))
default:
panic(fmt.Sprintf("cannot decode list into %v", v.Type()))
}
}
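// endList reports whether the current token closes the list being read.
// It panics at end of file, since well-formed input closes every list with ')'.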
func endList(lex *lexer) bool {
switch lex.token {
case scanner.EOF:
panic("end of file")
case ')':
return true
}
return false
}
func init() {
Interfaces = make(map[string]reflect.Type)
}
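
// A hedged sketch of decoding into an interface value (the "[]int" key is
// illustrative; register whatever type name the matching encoder writes):
//
//	Interfaces["[]int"] = reflect.TypeOf([]int(nil))
//	var v interface{}
//	err := Unmarshal([]byte(`("[]int" (1 2 3))`), &v)
//
// On success v holds a []int with the elements 1, 2, and 3.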