Add bufTokenReader, various 94 parser fixes (#84)
* lex: add bufTokenReader to recycle lexed token buffer

* fix index panic in 94 loading empty strategy lines

* cmd/compile_test: add exception for nop

* fix (some) end label line values

* add compile_test to .gitignore
bobertlo authored Nov 21, 2024
1 parent 671638e commit c1a2662
Showing 8 changed files with 88 additions and 5 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,2 +1,3 @@
 /vmars
 /gmars
+/compile_test
2 changes: 2 additions & 0 deletions asm.go
@@ -65,6 +65,8 @@ func (o OpCode) String() string {
 		return "DJN"
 	case SPL:
 		return "SPL"
+	case NOP:
+		return "NOP"
 	default:
 		return "???"
 	}
8 changes: 8 additions & 0 deletions cmd/compile_test/main.go
@@ -108,6 +108,14 @@ func main() {
 	instructionsMatch := true
 	for i, inst := range in.Code {
 		if expected.Code[i] != inst {
+			if inst.Op == gmars.NOP {
+				ex := expected.Code[i]
+				if inst.OpMode == gmars.B && ex.OpMode == gmars.F {
+					if inst.Op == ex.Op && inst.AMode == ex.AMode && inst.A == ex.A && inst.BMode == ex.BMode && inst.B == ex.B {
+						continue
+					}
+				}
+			}
 			fmt.Printf("%s: instruction mismatch: '%s' != '%s'\n", inPath, inst, expected.Code[i])
 			instructionsMatch = false
 		}
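Editor's note on why this exception is safe (reasoning not stated in the commit): under ICWS '94 semantics a NOP performs no operation regardless of its modifier, so a NOP.B emitted by one compiler and a NOP.F emitted by another behave identically whenever their operands match. A minimal standalone sketch of the equivalence check, with the instruction type and field names assumed from the diff above:

	// nopEquivalent reports whether a and b differ only in the modifier of
	// a NOP, which has no effect at runtime. gmars.Instruction and its Op,
	// OpMode, AMode, A, BMode, and B fields are assumed from the diff above.
	func nopEquivalent(a, b gmars.Instruction) bool {
		if a.Op != gmars.NOP || b.Op != gmars.NOP {
			return false
		}
		return a.AMode == b.AMode && a.A == b.A && a.BMode == b.BMode && a.B == b.B
	}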
4 changes: 2 additions & 2 deletions compile.go
@@ -78,7 +78,7 @@ func (c *compiler) loadSymbols() {
 func (c *compiler) reloadReferences() error {
 	c.labels = make(map[string]int)
 
-	curPseudoLine := 1
+	var curPseudoLine int
 	for _, line := range c.lines {
 		if line.typ == lineInstruction {
 			for _, label := range line.labels {
@@ -87,7 +87,7 @@ func (c *compiler) reloadReferences() error {
 					return fmt.Errorf("line %d: label '%s' redefined", line.line, label)
 				}
 				c.labels[label] = line.codeLine
-				curPseudoLine++
+				curPseudoLine = line.codeLine + 1
 			}
 		} else if line.typ == linePseudoOp {
 			for _, label := range line.labels {
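Editor's note on the fix (my reading of the diff, not text from the commit): the old code bumped curPseudoLine once per label, so an instruction carrying two labels advanced the counter twice and every later pseudo-op label resolved too far. Deriving the value from codeLine keeps it pinned to the loadfile address just past the last instruction. A hypothetical illustration, assuming the elided pseudo-op branch assigns curPseudoLine to its labels:

	// a start mov 0, 1    ; two labels on codeLine 0
	// finish end start    ; pseudo-op label
	//
	// old: curPseudoLine++ ran once for "a" and once for "start",
	//      so labels["finish"] == 3 instead of 1
	// new: curPseudoLine = line.codeLine + 1 == 1, the address
	//      directly after the last instruction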
43 changes: 43 additions & 0 deletions lex.go
@@ -7,6 +7,13 @@ import (
 	"unicode"
 )
 
+// tokenReader defines an interface shared between the stream-based lexer
+// and a bufTokenReader that caches tokens in memory.
+type tokenReader interface {
+	NextToken() (token, error)
+	Tokens() ([]token, error)
+}
+
 type lexer struct {
 	reader   *bufio.Reader
 	nextRune rune
@@ -15,6 +22,37 @@ type lexer struct {
 	tokens chan token
 }
 
+// bufTokenReader implements the same interface as the streaming lexer to
+// let us cache and reuse the token stream instead of making multiple
+// passes with the lexer.
+type bufTokenReader struct {
+	tokens []token
+	i      int
+}
+
+func newBufTokenReader(tokens []token) *bufTokenReader {
+	return &bufTokenReader{tokens: tokens}
+}
+
+func (r *bufTokenReader) NextToken() (token, error) {
+	if r.i >= len(r.tokens) {
+		return token{}, fmt.Errorf("no more tokens")
+	}
+	next := r.tokens[r.i]
+	r.i++
+	return next, nil
+}
+
+func (r *bufTokenReader) Tokens() ([]token, error) {
+	if r.i >= len(r.tokens) {
+		return nil, fmt.Errorf("no more tokens")
+	}
+	subslice := r.tokens[r.i:]
+	ret := make([]token, len(subslice))
+	copy(ret, subslice)
+	return ret, nil
+}
+
 type lexStateFn func(l *lexer) lexStateFn
 
 func newLexer(r io.Reader) *lexer {
@@ -221,3 +259,8 @@ func lexComment(l *lexer) lexStateFn {
 	l.tokens <- token{typ: tokComment, val: string(commentBuf)}
 	return lexInput
 }
+
+func LexInput(r io.Reader) ([]token, error) {
+	lexer := newLexer(r)
+	return lexer.Tokens()
+}
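A sketch of the intended usage (editor's reading, not code from the commit): lex the source once, then replay the cached tokens for as many passes as needed instead of re-running the lexer. Since newParser now accepts any tokenReader (see parser.go below), a bufTokenReader drops in wherever the streaming lexer was passed before:

	tokens, err := LexInput(strings.NewReader("MOV 0, 1\n"))
	if err != nil {
		log.Fatal(err)
	}

	// First pass: the parser consumes a replay of the cached tokens.
	p := newParser(newBufTokenReader(tokens))

	// Second pass: a fresh reader starts over on the same slice,
	// with no additional lexing.
	second := newBufTokenReader(tokens)

Note that Tokens on a bufTokenReader returns a copy of the remaining tokens, so callers cannot mutate the cached slice through the returned value.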
13 changes: 13 additions & 0 deletions lex_test.go
@@ -145,3 +145,16 @@ func TestLexEnd(t *testing.T) {
 	assert.True(t, eof)
 	assert.Equal(t, r, '\x00')
 }
+
+func TestBufTokenReader(t *testing.T) {
+	in := strings.NewReader("dat 0, 0\n")
+	lexer := newLexer(in)
+	tokens, err := lexer.Tokens()
+	require.NoError(t, err)
+
+	bReader := newBufTokenReader(tokens)
+	bTokens, err := bReader.Tokens()
+	require.NoError(t, err)
+
+	require.Equal(t, tokens, bTokens)
+}
14 changes: 14 additions & 0 deletions load_test.go
@@ -81,6 +81,20 @@ func TestValidInput(t *testing.T) {
 	}
 }
 
+func TestValidInput94(t *testing.T) {
+	// random inputs that are valid but not worth validating output
+	cases := []string{
+		"ADD.BA $ 1, $ 1\n",
+	}
+
+	config := ConfigNOP94
+	for i, testCase := range cases {
+		reader := strings.NewReader(testCase)
+		_, err := ParseLoadFile(reader, config)
+		assert.NoError(t, err, "test %d: '%s'", i, testCase)
+	}
+}
+
 func TestInvalidInput(t *testing.T) {
 	// random inputs that will throw an error
 	cases := []string{
8 changes: 5 additions & 3 deletions parser.go
@@ -50,7 +50,7 @@ func (line sourceLine) subSymbol(label string, value []token) sourceLine {
 }
 
 type parser struct {
-	lex *lexer
+	lex tokenReader
 
 	// state for the running parser
 	nextToken token
@@ -71,7 +71,7 @@ type parser struct {
 	references map[string]int
 }
 
-func newParser(lex *lexer) *parser {
+func newParser(lex tokenReader) *parser {
 	p := &parser{
 		lex:     lex,
 		symbols: make(map[string]int),
@@ -199,7 +199,9 @@ func parseLine(p *parser) parseStateFn {
 	} else if strings.HasPrefix(p.nextToken.val, ";author") {
 		p.metadata.Author = strings.TrimSpace(p.nextToken.val[7:])
 	} else if strings.HasPrefix(p.nextToken.val, ";strategy") {
-		p.metadata.Strategy += p.nextToken.val[10:] + "\n"
+		if len(p.nextToken.val) > 10 {
+			p.metadata.Strategy += p.nextToken.val[10:] + "\n"
+		}
 	}
 	p.currentLine.typ = lineComment
 	return parseComment
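The guard fixes the index panic named in the commit message: ";strategy" is nine characters, so on a strategy comment with no text after it the unguarded val[10:] sliced past the end of the string (the offset of 10 also skips the space after the keyword). A standalone reproduction of the idea, not code from the commit:

	var strategy string
	val := ";strategy" // an empty strategy line: len(val) == 9
	// Unguarded, val[10:] panics: slice bounds out of range [10:9].
	if len(val) > 10 {
		strategy += val[10:] + "\n" // skipped here, so no panic
	}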
