[WIP] expand for loops in preprocessor #90

Draft · wants to merge 8 commits into master

20 changes: 16 additions & 4 deletions compile.go
@@ -451,10 +451,22 @@ func CompileWarrior(r io.Reader, config SimulatorConfig) (WarriorData, error) {
return WarriorData{}, err
}

// scanner := newSymbolScanner(newBufTokenReader(tokens))
// _, err = scanner.ScanInput()
// if err != nil {
// return WarriorData{}, fmt.Errorf("symbol scanner: %s", err)
// for {
// symbols, forSeen, err := ScanInput(newBufTokenReader(tokens))
// if err != nil {
// return WarriorData{}, fmt.Errorf("symbol scanner: %s", err)
// }
// if forSeen {
// expandedTokens, err := ForExpand(newBufTokenReader(tokens), symbols)
// if err != nil {
// return WarriorData{}, fmt.Errorf("for: %s", err)
// }
// tokens = expandedTokens
// // oops the embedded for loops are not implemented
// break
// } else {
// break
// }
// }

parser := newParser(newBufTokenReader(tokens))
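For reference, once this pass is enabled, the commented-out block above would amount to roughly the sketch below (illustrative only; it drops the outer for/break loop, since nested for loops are not implemented yet, as the comment notes):

	symbols, forSeen, err := ScanInput(newBufTokenReader(tokens))
	if err != nil {
		return WarriorData{}, fmt.Errorf("symbol scanner: %s", err)
	}
	if forSeen {
		expandedTokens, err := ForExpand(newBufTokenReader(tokens), symbols)
		if err != nil {
			return WarriorData{}, fmt.Errorf("for: %s", err)
		}
		tokens = expandedTokens
	}
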
28 changes: 28 additions & 0 deletions expr.go
@@ -7,6 +7,34 @@ import (
"strconv"
)

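// ExpandAndEvaluate substitutes the resolved symbol values into expr and
// evaluates the resulting expression to an integer.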
func ExpandAndEvaluate(expr []token, symbols map[string][]token) (int, error) {
graph := buildReferenceGraph(symbols)

cyclic, key := graphContainsCycle(graph)
if cyclic {
return 0, fmt.Errorf("symbol graph contains cycles: %s", key)
}

resolved, err := expandExpressions(symbols, graph)
if err != nil {
return 0, err
}

expanded := make([]token, 0)
for _, tok := range expr {
if tok.typ == tokText {
symVal, ok := resolved[tok.val]
if ok {
expanded = append(expanded, symVal...)
continue
}
}
expanded = append(expanded, tok)
}

return evaluateExpression(expanded)
}

func expandValue(key string, values, resolved map[string][]token, graph map[string][]string) ([]token, error) {
// load key value or error
value, valOk := values[key]
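A rough usage sketch of ExpandAndEvaluate (not part of the diff; it assumes positional token literals like token{tokNumber, "4"} are valid and that the existing expression helpers resolve a lone symbol reference):

	symbols := map[string][]token{
		"step": {{tokNumber, "4"}},
	}
	val, err := ExpandAndEvaluate([]token{{tokText, "step"}}, symbols)
	// val should be 4: "step" is replaced by its value before evaluation
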
336 changes: 336 additions & 0 deletions forexpand.go
@@ -0,0 +1,336 @@
package gmars

import (
"fmt"
"strings"
)

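// forExpander is a tokenReader filter: it reads tokens from lex, buffers the
// body of each for/rof block, and emits the body forCount times with the
// count label replaced by the iteration number.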
type forExpander struct {
lex tokenReader

// lexing state fields
nextToken token
labelBuf []string
exprBuf []token
atEOF bool

// for state fields
forCountLabel string
forLineLabels []string
forCount int
forIndex int
forContent []token

symbols map[string][]token

// output fields
tokens chan token
closed bool
err error
}

type forStateFn func(f *forExpander) forStateFn

func newForExpander(lex tokenReader, symbols map[string][]token) *forExpander {
f := &forExpander{lex: lex, symbols: symbols}
f.next()
f.tokens = make(chan token)
go f.run()
return f
}

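// ForExpand runs a forExpander over lex and returns the fully expanded token stream.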
func ForExpand(lex tokenReader, symbols map[string][]token) ([]token, error) {
expander := newForExpander(lex, symbols)
tokens, err := expander.Tokens()
if err != nil {
return nil, err
}
return tokens, nil
}

func (f *forExpander) next() token {
if f.atEOF {
return token{typ: tokEOF}
}
tok, err := f.lex.NextToken()
if err != nil {
f.atEOF = true
return token{tokError, fmt.Sprintf("%s\n", err)}
}
if tok.typ == tokEOF || tok.typ == tokError {
f.atEOF = true
}
retTok := f.nextToken
f.nextToken = tok
return retTok
}

func (f *forExpander) run() {
if f.closed || f.atEOF {
return
}
for state := forLine; state != nil; {
state = state(f)
}
// add an extra EOF in case we end without one
// we don't want to block on reading from the channel
f.tokens <- token{tokEOF, ""}
f.closed = true
}

func (f *forExpander) NextToken() (token, error) {
if f.closed {
return token{}, fmt.Errorf("no more tokens")
}
return <-f.tokens, nil
}

func (f *forExpander) Tokens() ([]token, error) {
if f.closed {
return nil, fmt.Errorf("no more tokens")
}
tokens := make([]token, 0)
for !f.closed {
tok := <-f.tokens
tokens = append(tokens, tok)
if tok.typ == tokEOF || tok.typ == tokError {
break
}
}
return tokens, nil
}

func (f *forExpander) emitConsume(nextState forStateFn) forStateFn {
f.tokens <- f.nextToken
f.next()
return nextState
}

// forLine is the base state and is returned to after every newline outside a for loop
// text: forConsumeLabels
// anything else: forConsumeEmitLine
func forLine(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokText:
f.labelBuf = make([]string, 0)
return forConsumeLabels
default:
return f.emitConsume(forConsumeEmitLine)
}
}

// forConsumeLabels consumes labels into labelBuf and goes to the next state
// text "for": forConsumeExpression
// text op/pseudo: forWriteLabelsEmitConsumeLine
// text other: append to labelBuf, forConsumeLabels
// newline/comment: forConsumeLabels
// other: nil
func forConsumeLabels(f *forExpander) forStateFn {
if f.nextToken.typ == tokText {

if f.nextToken.IsPseudoOp() {
opLower := strings.ToLower(f.nextToken.val)
if opLower == "for" {
f.next()
f.exprBuf = make([]token, 0)
return forConsumeExpression
} else {
return forWriteLabelsEmitConsumeLine
}
} else if f.nextToken.IsOp() {
return forWriteLabelsEmitConsumeLine
} else {
f.labelBuf = append(f.labelBuf, f.nextToken.val)
f.next()
return forConsumeLabels
}
} else if f.nextToken.typ == tokNewline || f.nextToken.typ == tokComment {
f.next()
return forConsumeLabels
} else {
f.err = fmt.Errorf("expected label, op, newlines, or comment, got '%s'", f.nextToken)
return nil
}
}

// forWriteLabelsEmitConsumeLine writes all the stored labels to the token channel,
// emits the current nextToken, and continues with forConsumeEmitLine
func forWriteLabelsEmitConsumeLine(f *forExpander) forStateFn {
for _, label := range f.labelBuf {
f.tokens <- token{tokText, label}
}
f.labelBuf = make([]string, 0)
return f.emitConsume(forConsumeEmitLine)
}

// forConsumeEmitLine consumes and emits tokens until a newline is reached
// the newline is consumed and emitted before calling forLine
func forConsumeEmitLine(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokNewline:
return f.emitConsume(forLine)
case tokError:
return f.emitConsume(nil)
case tokEOF:
return f.emitConsume(nil)
default:
return f.emitConsume(forConsumeEmitLine)
}
}

// forConsumeExpression consumes tokens into exprBuf until a newline is
// reached, then returns forFor after consuming the newline
// newline: forFor
// error: emit, nil
// eof: nil
// otherwise: forConsumeExpression
func forConsumeExpression(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokNewline:
f.next()
return forFor
case tokError:
return f.emitConsume(nil)
case tokEOF:
return nil
default:
// f.tokens <- f.nextToken
f.exprBuf = append(f.exprBuf, f.nextToken)
f.next()
return forConsumeExpression
}
}

// forFor evaluates the count expression gathered in exprBuf by
// forConsumeExpression and sets up the for state fields
// returns forInnerLine, or nil after emitting an error token
func forFor(f *forExpander) forStateFn {
for _, tok := range f.exprBuf {
if tok.typ == tokEOF || tok.typ == tokError {
f.err = fmt.Errorf("unexpected expression term: %s", tok)
f.tokens <- token{tokError, f.err.Error()}
return nil
}
}

val, err := ExpandAndEvaluate(f.exprBuf, f.symbols)
if err != nil {
f.tokens <- token{tokError, fmt.Sprintf("%s", err)}
return nil
}

if len(f.labelBuf) > 0 {
f.forCountLabel = f.labelBuf[len(f.labelBuf)-1]
if len(f.labelBuf) > 1 {
f.forLineLabels = f.labelBuf[:len(f.labelBuf)-1]
} else {
f.forLineLabels = []string{}
}
} else {
f.forCountLabel = ""
f.forLineLabels = []string{}
}

f.forCount = val
f.forIndex = 0 // should not be necessary
f.forContent = make([]token, 0)
f.labelBuf = make([]string, 0)

return forInnerLine
}

// forInnerLine is the base state inside a for block
// text: forInnerLabels
// other: forInnerEmitConsumeLine
func forInnerLine(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokText:
return forInnerLabels
default:
// consume the line into the for buffer
return forInnerEmitConsumeLine
}
}

// forInnerLabels buffers labels inside a for block; mainly this lets labels
// that appear directly before 'rof' be dropped rather than repeated
func forInnerLabels(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokText:
if f.nextToken.IsPseudoOp() && strings.ToLower(f.nextToken.val) == "rof" {
// end of the for block: emit the repeated buffer
return forRof
} else if f.nextToken.IsOp() {
// write the buffered labels into the for buffer, then consume the line
return forInnerEmitLabels
} else {
f.labelBuf = append(f.labelBuf, f.nextToken.val)
f.next()
return forInnerLabels
}
default:
// write the buffered labels into the for buffer, then consume the line
return forInnerEmitLabels
}
}

// forInnerEmitLabels moves the buffered labels into the for content buffer
// and clears labelBuf before the rest of the line is consumed
func forInnerEmitLabels(f *forExpander) forStateFn {
for _, label := range f.labelBuf {
f.forContent = append(f.forContent, token{tokText, label})
}
f.labelBuf = make([]string, 0)
return forInnerEmitConsumeLine
}

func forInnerEmitConsumeLine(f *forExpander) forStateFn {
switch f.nextToken.typ {
case tokError:
// TODO
return nil
case tokEOF:
return nil
case tokNewline:
// f.tokens <- f.nextToken
f.forContent = append(f.forContent, f.nextToken)
f.next()
return forInnerLine
default:
f.forContent = append(f.forContent, f.nextToken)
f.next()
return forInnerEmitConsumeLine
}
}

func forRof(f *forExpander) forStateFn {
for f.nextToken.typ != tokNewline {
if f.nextToken.typ == tokEOF || f.nextToken.typ == tokError {
return nil
}
f.next()
}
f.next()

for i := 1; i <= f.forCount; i++ {
for _, tok := range f.forContent {
if tok.typ == tokText {
if tok.val == f.forCountLabel {
f.tokens <- token{tokNumber, fmt.Sprintf("%d", i)}
} else {
f.tokens <- tok
}
} else {
f.tokens <- tok
}
}
}

return forEmitConsumeStream
}

func forEmitConsumeStream(f *forExpander) forStateFn {
for f.nextToken.typ != tokEOF {
f.tokens <- f.nextToken
f.next()
}
return nil
}
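As a rough end-to-end illustration (not part of the diff; the hand-built token stream below assumes newBufTokenReader accepts a []token slice and that empty values are acceptable for newline/EOF tokens), a two-iteration for block is expanded by repeating its body:

	// equivalent source:
	//   i for 2
	//   dat 0
	//   rof
	input := []token{
		{tokText, "i"}, {tokText, "for"}, {tokNumber, "2"}, {tokNewline, ""},
		{tokText, "dat"}, {tokNumber, "0"}, {tokNewline, ""},
		{tokText, "rof"}, {tokNewline, ""},
		{tokEOF, ""},
	}
	expanded, err := ForExpand(newBufTokenReader(input), map[string][]token{})
	// expanded should contain the "dat 0" line twice; any occurrence of the
	// count label "i" in the body would be replaced by 1 and then 2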