From aadb9215f39adeb23f88c7830e736945d84791b9 Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Sun, 24 Nov 2024 14:50:42 -0600 Subject: [PATCH 1/8] sybol_scanner: stop and report forSeen on for input --- compile.go | 18 +++++++++++++----- symbol_scanner.go | 36 +++++++++++++++--------------------- symbol_scanner_test.go | 2 +- 3 files changed, 29 insertions(+), 27 deletions(-) diff --git a/compile.go b/compile.go index 2583b09..8bf608a 100644 --- a/compile.go +++ b/compile.go @@ -451,11 +451,19 @@ func CompileWarrior(r io.Reader, config SimulatorConfig) (WarriorData, error) { return WarriorData{}, err } - // scanner := newSymbolScanner(newBufTokenReader(tokens)) - // _, err = scanner.ScanInput() - // if err != nil { - // return WarriorData{}, fmt.Errorf("symbol scanner: %s", err) - // } + for { + _, forSeen, err := ScanInput(newBufTokenReader(tokens)) + if err != nil { + return WarriorData{}, fmt.Errorf("symbol scanner: %s", err) + } + if forSeen { + // tokens := ForExpand(newBufTokenReader(tokens), symbols) + // we will just break here for now or else it is an infinite loop + break + } else { + break + } + } parser := newParser(newBufTokenReader(tokens)) sourceLines, metadata, err := parser.parse() diff --git a/symbol_scanner.go b/symbol_scanner.go index 89eef1b..f4dcea6 100644 --- a/symbol_scanner.go +++ b/symbol_scanner.go @@ -16,7 +16,7 @@ type symbolScanner struct { atEOF bool valBuf []token labelBuf []string - forLevel int + forSeen bool err error symbols map[string][]token @@ -52,17 +52,22 @@ func (p *symbolScanner) next() token { return retTok } -// run the preprocessor -func (p *symbolScanner) ScanInput() (map[string][]token, error) { +func ScanInput(lex tokenReader) (map[string][]token, bool, error) { + scanner := newSymbolScanner(lex) + return scanner.ScanInput() +} + +func (p *symbolScanner) ScanInput() (map[string][]token, bool, error) { for state := scanLine; state != nil; { state = state(p) } if p.err != nil { - return nil, p.err + return nil, false, p.err } - return p.symbols, nil + return p.symbols, p.forSeen, nil } +// consume the current nextToken and go to nextState unless EOF func (p *symbolScanner) consume(nextState scanStateFn) scanStateFn { p.next() if p.nextToken.typ == tokEOF { @@ -95,24 +100,13 @@ func scanLabels(p *symbolScanner) scanStateFn { opLower := strings.ToLower(p.nextToken.val) switch opLower { case "equ": - if p.forLevel == 0 { - p.valBuf = make([]token, 0) - return p.consume(scanEquValue) - } + p.valBuf = make([]token, 0) + return p.consume(scanEquValue) case "for": - p.forLevel++ - return scanConsumeLine - case "rof": - if p.forLevel > 0 { - p.forLevel-- - } - return scanConsumeLine + p.forSeen = true + return nil case "end": - if p.forLevel > 1 { - return scanConsumeLine - } else { - return nil - } + return nil default: return scanConsumeLine } diff --git a/symbol_scanner_test.go b/symbol_scanner_test.go index 8081ef4..1f30283 100644 --- a/symbol_scanner_test.go +++ b/symbol_scanner_test.go @@ -19,7 +19,7 @@ func runSymbolScannerTests(t *testing.T, cases []symbolScannerTestCase) { require.NotNil(t, tokens) scanner := newSymbolScanner(newBufTokenReader(tokens)) - symbols, err := scanner.ScanInput() + symbols, _, err := scanner.ScanInput() require.NoError(t, err) require.NotNil(t, symbols) From 9ca48e8dcf0099a663e6641e9c75199f7e55b9c8 Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Sun, 24 Nov 2024 19:55:48 -0600 Subject: [PATCH 2/8] load: accept slt # b-field in 88 mode --- load.go | 10 ++++++---- load_test.go | 5 +++-- 2 files changed, 9 
insertions(+), 6 deletions(-) diff --git a/load.go b/load.go index 4a8b1bf..a62552f 100644 --- a/load.go +++ b/load.go @@ -223,10 +223,12 @@ func getOpModeAndValidate88(Op OpCode, AMode AddressMode, BMode AddressMode) (Op case SLT: // SLT; - // AB if #A, B otherwise, no #B allowed - if BMode == IMMEDIATE { - return 0, fmt.Errorf("invalid b mode '#' for op 'slt'") - } + // AB if #A, B otherwise + + // #B is not allowed by the 88 standard but is allowed on hills + // if BMode == IMMEDIATE { + // return 0, fmt.Errorf("invalid b mode '#' for op 'slt'") + // } if AMode == IMMEDIATE { return AB, nil } else { diff --git a/load_test.go b/load_test.go index ed84ef6..b21bb20 100644 --- a/load_test.go +++ b/load_test.go @@ -66,11 +66,12 @@ func TestLoadDwarf(t *testing.T) { }, data.Code) } -func TestValidInput(t *testing.T) { +func TestValidInput88(t *testing.T) { // random inputs that are valid but not worth validating output cases := []string{ "END\n", "\n\n", + "SLT $ 0, # 0\n", // not in 88 spec, pMARS supports this though } config := ConfigKOTH88 @@ -112,7 +113,7 @@ func TestInvalidInput(t *testing.T) { "DAT # 0, $ 0\n", "DAT # 0, @ 0\n", "CMP $ 0, # 0\n", - "SLT $ 0, # 0\n", + // "SLT $ 0, # 0\n", // pMARS supports this "ADD $ 0, # 0\n", "SUB $ 0, # 0\n", "JMP # 0, $ 0\n", From 8cc570c64f057d4e13fb504e5f7d35a4c6efc0d2 Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Sun, 24 Nov 2024 21:02:21 -0600 Subject: [PATCH 3/8] update more 88 tests --- load_test.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/load_test.go b/load_test.go index b21bb20..62d4209 100644 --- a/load_test.go +++ b/load_test.go @@ -193,6 +193,12 @@ func TestValidOpModeCombos88(t *testing.T) { {"SLT < 1, @ 2\n", Instruction{Op: SLT, OpMode: B, AMode: B_DECREMENT, A: 1, BMode: B_INDIRECT, B: 2}}, {"SLT < 1, < 2\n", Instruction{Op: SLT, OpMode: B, AMode: B_DECREMENT, A: 1, BMode: B_DECREMENT, B: 2}}, + // these are not listed as legal instructions but do run in pMARS + {"SLT # 1, # 2\n", Instruction{Op: SLT, OpMode: AB, AMode: IMMEDIATE, A: 1, BMode: IMMEDIATE, B: 2}}, + {"SLT $ 1, # 2\n", Instruction{Op: SLT, OpMode: B, AMode: DIRECT, A: 1, BMode: IMMEDIATE, B: 2}}, + {"SLT @ 1, # 2\n", Instruction{Op: SLT, OpMode: B, AMode: B_INDIRECT, A: 1, BMode: IMMEDIATE, B: 2}}, + {"SLT < 1, # 2\n", Instruction{Op: SLT, OpMode: B, AMode: B_DECREMENT, A: 1, BMode: IMMEDIATE, B: 2}}, + // JMP, JMN, JMZ, DJN, SPL {"JMP $ 1, # 2\n", Instruction{Op: JMP, OpMode: B, AMode: DIRECT, A: 1, BMode: IMMEDIATE, B: 2}}, {"JMP $ 1, $ 2\n", Instruction{Op: JMP, OpMode: B, AMode: DIRECT, A: 1, BMode: DIRECT, B: 2}}, From 4d0de37f2c7ec894ff7164ab25d306f730e6dfee Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Tue, 26 Nov 2024 21:31:14 -0600 Subject: [PATCH 4/8] implement forExpander with enought code to pass-through tokens --- forexpand.go | 161 ++++++++++++++++++++++++++++++++++++++++++++++ forexpand_test.go | 65 +++++++++++++++++++ 2 files changed, 226 insertions(+) create mode 100644 forexpand.go create mode 100644 forexpand_test.go diff --git a/forexpand.go b/forexpand.go new file mode 100644 index 0000000..853c167 --- /dev/null +++ b/forexpand.go @@ -0,0 +1,161 @@ +package gmars + +import ( + "fmt" + "strings" +) + +type forExpander struct { + lex tokenReader + + nextToken token + labelBuf []string + // valueBuf []token + atEOF bool + + tokens chan token + closed bool + err error +} + +type forStateFn func(f *forExpander) forStateFn + +func newForExpander(lex tokenReader) *forExpander { + f := &forExpander{lex: lex} + f.next() + f.tokens = 
make(chan token) + go f.run() + return f +} + +func (p *forExpander) next() token { + if p.atEOF { + return token{typ: tokEOF} + } + tok, err := p.lex.NextToken() + if err != nil { + p.atEOF = true + return token{tokError, fmt.Sprintf("%s\n", err)} + } + if tok.typ == tokEOF || tok.typ == tokError { + p.atEOF = true + } + retTok := p.nextToken + p.nextToken = tok + return retTok +} + +func (f *forExpander) run() { + if f.closed || f.atEOF { + return + } + for state := forLine; state != nil; { + state = state(f) + } + f.closed = true +} + +func (f *forExpander) NextToken() (token, error) { + if f.closed { + return token{}, fmt.Errorf("no more tokens") + } + return <-f.tokens, nil +} + +func (f *forExpander) Tokens() ([]token, error) { + if f.closed { + return nil, fmt.Errorf("no more tokens") + } + tokens := make([]token, 0) + for !f.closed { + tok := <-f.tokens + tokens = append(tokens, tok) + if tok.typ == tokEOF || tok.typ == tokError { + break + } + } + return tokens, nil +} + +func (f *forExpander) emitConsume(nextState forStateFn) forStateFn { + f.tokens <- f.nextToken + f.next() + return nextState +} + +// forLine is the base state and returned to after every newline outside a for loop +// text: forConsumeLabels +// anything else: forConsumeLine +func forLine(f *forExpander) forStateFn { + switch f.nextToken.typ { + case tokText: + f.labelBuf = make([]string, 0) + return forConsumeLabels + default: + return f.emitConsume(forConsumeLine) + } +} + +// consume labels into labelBuf and go to next state +// text "for": forFor +// text op/pseudo: forWriteLabelsConsumeLine +// text other: append to labelBuf, forConsumeLabels +// newline/comment: forConsumeLabels +// other: nil +func forConsumeLabels(f *forExpander) forStateFn { + if f.nextToken.typ == tokText { + + if f.nextToken.IsPseudoOp() { + opLower := strings.ToLower(f.nextToken.val) + if opLower == "for" { + return forFor + } else { + return forWriteLabelsConsumeLine + } + } else if f.nextToken.IsOp() { + return forWriteLabelsConsumeLine + } else { + f.labelBuf = append(f.labelBuf, f.nextToken.val) + f.next() + return forConsumeLabels + } + } else if f.nextToken.typ == tokNewline || f.nextToken.typ == tokComment { + f.next() + return forConsumeLabels + } else { + f.err = fmt.Errorf("expected label, op, newlines, or comment, got '%s'", f.nextToken) + return nil + } +} + +func forWriteLabelsConsumeLine(f *forExpander) forStateFn { + for _, label := range f.labelBuf { + f.tokens <- token{tokText, label} + } + f.labelBuf = make([]string, 0) + return f.emitConsume(forConsumeLine) +} + +func forConsumeLine(f *forExpander) forStateFn { + switch f.nextToken.typ { + case tokNewline: + return f.emitConsume(forLine) + case tokError: + return f.emitConsume(nil) + case tokEOF: + return f.emitConsume(nil) + default: + return f.emitConsume(forConsumeLine) + } +} + +func forFor(f *forExpander) forStateFn { + // if len(f.labelBuf) == 0 { + + // } + return nil +} + +func forInnerLine(f *forExpander) forStateFn { + return nil +} diff --git a/forexpand_test.go b/forexpand_test.go new file mode 100644 index 0000000..6f8f368 --- /dev/null +++ b/forexpand_test.go @@ -0,0 +1,65 @@ +package gmars + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +type forTestCase struct { + input string + output []token +} + +func runForExpanderTests(t *testing.T, cases []forTestCase) { + for _, test := range cases { + tokens, err := LexInput(strings.NewReader(test.input)) + require.NoError(t, err) + require.NotNil(t, tokens) + + // scanner 
:= newSymbolScanner(newBufTokenReader(tokens)) + expander := newForExpander(newBufTokenReader(tokens)) + outTokens, err := expander.Tokens() + require.NoError(t, err) + + require.Equal(t, test.output, outTokens) + } +} + +func TestForExpander(t *testing.T) { + tests := []forTestCase{ + { + input: "test equ 2\ndat 0, test\n", + output: []token{ + {tokText, "test"}, + {tokText, "equ"}, + {tokNumber, "2"}, + {tokNewline, ""}, + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokText, "test"}, + {tokNewline, ""}, + {tokEOF, ""}, + }, + }, + { + input: "test test2 equ 2\ndat 0, test\n", + output: []token{ + {tokText, "test"}, + {tokText, "test2"}, + {tokText, "equ"}, + {tokNumber, "2"}, + {tokNewline, ""}, + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokText, "test"}, + {tokNewline, ""}, + {tokEOF, ""}, + }, + }, + } + runForExpanderTests(t, tests) +} From 50b201145835540e6c7cf01dcc0ad165358efece Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Wed, 27 Nov 2024 21:03:06 -0600 Subject: [PATCH 5/8] more work on forexpander --- forexpand.go | 52 ++++++++++++++++++++++++++++++++++++++--------- forexpand_test.go | 17 +++++++++++++++- 2 files changed, 58 insertions(+), 11 deletions(-) diff --git a/forexpand.go b/forexpand.go index 853c167..988ca77 100644 --- a/forexpand.go +++ b/forexpand.go @@ -10,8 +10,8 @@ type forExpander struct { nextToken token labelBuf []string - // valueBuf []token - atEOF bool + exprBuf []token + atEOF bool tokens chan token closed bool @@ -52,6 +52,9 @@ func (f *forExpander) run() { for state := forLine; state != nil; { state = state(f) } + // add an extra EOF in case we end without one + // we don't want to block on reading from the channel + f.tokens <- token{tokEOF, ""} f.closed = true } @@ -92,7 +95,7 @@ func forLine(f *forExpander) forStateFn { f.labelBuf = make([]string, 0) return forConsumeLabels default: - return f.emitConsume(forConsumeLine) + return f.emitConsume(forConsumeEmitLine) } } @@ -108,7 +111,9 @@ func forConsumeLabels(f *forExpander) forStateFn { if f.nextToken.IsPseudoOp() { opLower := strings.ToLower(f.nextToken.val) if opLower == "for" { - return forFor + f.next() + f.exprBuf = make([]token, 0) + return forConsumeExpression } else { return forWriteLabelsConsumeLine } @@ -128,15 +133,19 @@ func forConsumeLabels(f *forExpander) forStateFn { } } +// forWriteLabelsConsumeLine writes all the stored labels to the token channel, +// emits the current nextToken and returns forConsumeLine func forWriteLabelsConsumeLine(f *forExpander) forStateFn { for _, label := range f.labelBuf { f.tokens <- token{tokText, label} } f.labelBuf = make([]string, 0) - return f.emitConsume(forConsumeLine) + return f.emitConsume(forConsumeEmitLine) } -func forConsumeLine(f *forExpander) forStateFn { +// forConsumeEmitLine consumes and emits tokens until a newline is reached +// the newline is consumed and emitted before calling forLine +func forConsumeEmitLine(f *forExpander) forStateFn { switch f.nextToken.typ { case tokNewline: return f.emitConsume(forLine) @@ -145,17 +154,40 @@ func forConsumeLine(f *forExpander) forStateFn { case tokEOF: return f.emitConsume(nil) default: - return f.emitConsume(forConsumeLine) + return f.emitConsume(forConsumeEmitLine) } } -func forFor(f *forExpander) forStateFn { - // if len(f.labelBuf) == 0 { +// forConsumeExpressions consumes tokens into the exprBuf until +// a newline is reached then returns forInnerLine after consuming +// the newline to +// newline: forInnerLine +// error: emit, nil +// eof: nil +// 
otherwise: forConsumeExpression +func forConsumeExpression(f *forExpander) forStateFn { + switch f.nextToken.typ { + case tokNewline: + f.next() + return forInnerLine + case tokError: + return f.emitConsume(nil) + case tokEOF: + return nil + default: + // f.tokens <- f.nextToken + f.exprBuf = append(f.exprBuf, f.nextToken) + f.next() + return forConsumeExpression + } +} - // } +func forFor(f *forExpander) forStateFn { + f.tokens <- token{tokError, ""} return nil } func forInnerLine(f *forExpander) forStateFn { + f.tokens <- token{tokError, ""} return nil } diff --git a/forexpand_test.go b/forexpand_test.go index 6f8f368..ee1c24d 100644 --- a/forexpand_test.go +++ b/forexpand_test.go @@ -22,13 +22,28 @@ func runForExpanderTests(t *testing.T, cases []forTestCase) { expander := newForExpander(newBufTokenReader(tokens)) outTokens, err := expander.Tokens() require.NoError(t, err) - require.Equal(t, test.output, outTokens) } } func TestForExpander(t *testing.T) { tests := []forTestCase{ + { + input: "i for 2\ndat 0, i\nrof\n", + output: []token{ + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokText, "1"}, + {tokNewline, ""}, + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokText, "2"}, + {tokNewline, ""}, + {tokEOF, ""}, + }, + }, { input: "test equ 2\ndat 0, test\n", output: []token{ From 06480581e24c5196f7ead4385fbbf8a57d27c86e Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Thu, 28 Nov 2024 18:49:26 -0600 Subject: [PATCH 6/8] forexpand: collect and evaluate count expressions --- expr.go | 28 +++++++++++++++++++++++++ forexpand.go | 52 ++++++++++++++++++++++++++++++++++++++++------- forexpand_test.go | 7 ++++--- 3 files changed, 77 insertions(+), 10 deletions(-) diff --git a/expr.go b/expr.go index b7281ee..968d8f8 100644 --- a/expr.go +++ b/expr.go @@ -7,6 +7,34 @@ import ( "strconv" ) +func ExpandAndEvaluate(expr []token, symbols map[string][]token) (int, error) { + graph := buildReferenceGraph(symbols) + + cyclic, key := graphContainsCycle(graph) + if cyclic { + return 0, fmt.Errorf("symbol graph contains cycles: %s", key) + } + + resolved, err := expandExpressions(symbols, graph) + if err != nil { + return 0, err + } + + expanded := make([]token, 0) + for _, tok := range expr { + if tok.typ == tokText { + symVal, ok := resolved[tok.val] + if ok { + expanded = append(expanded, symVal...) 
+ continue + } + } + expanded = append(expanded, tok) + } + + return evaluateExpression(expanded) +} + func expandValue(key string, values, resolved map[string][]token, graph map[string][]string) ([]token, error) { // load key value or error value, valOk := values[key] diff --git a/forexpand.go b/forexpand.go index 988ca77..3447b9f 100644 --- a/forexpand.go +++ b/forexpand.go @@ -8,11 +8,20 @@ import ( type forExpander struct { lex tokenReader + // lexing state fields nextToken token labelBuf []string exprBuf []token atEOF bool + // for state fields + forCount int + forIndex int + forContent []token + + symbols map[string][]token + + // output fields tokens chan token closed bool err error @@ -20,8 +29,8 @@ type forExpander struct { type forStateFn func(f *forExpander) forStateFn -func newForExpander(lex tokenReader) *forExpander { - f := &forExpander{lex: lex} +func newForExpander(lex tokenReader, symbols map[string][]token) *forExpander { + f := &forExpander{lex: lex, symbols: symbols} f.next() f.tokens = make(chan token) go f.run() @@ -161,7 +170,7 @@ func forConsumeEmitLine(f *forExpander) forStateFn { // forConsumeExpressions consumes tokens into the exprBuf until // a newline is reached then returns forInnerLine after consuming // the newline to -// newline: forInnerLine +// newline: forFor // error: emit, nil // eof: nil // otherwise: forConsumeExpression @@ -169,7 +178,7 @@ func forConsumeExpression(f *forExpander) forStateFn { switch f.nextToken.typ { case tokNewline: f.next() - return forInnerLine + return forFor case tokError: return f.emitConsume(nil) case tokEOF: @@ -182,12 +191,41 @@ func forConsumeExpression(f *forExpander) forStateFn { } } +// input: exprBuf from forConsumeExpression +// evaluates count expression and func forFor(f *forExpander) forStateFn { - f.tokens <- token{tokError, ""} - return nil + expr := make([]token, 0, len(f.exprBuf)) + for _, token := range f.exprBuf { + if token.typ == tokEOF || token.typ == tokError { + f.err = fmt.Errorf("unexpected expression term: %s", token) + } + expr = append(expr, token) + } + f.exprBuf = expr + + val, err := ExpandAndEvaluate(f.exprBuf, f.symbols) + if err != nil { + f.tokens <- token{tokError, fmt.Sprintf("%s", err)} + return nil + } + + f.forCount = val + f.forIndex = 0 // should not be necessary + f.forContent = make([]token, 0) + + return forInnerLine } func forInnerLine(f *forExpander) forStateFn { - f.tokens <- token{tokError, ""} + switch f.nextToken.typ { + case tokText: + return forInnerLabels + default: + return nil + } +} + +// this is really just to drop labels before 'rof' +func forInnerLabels(f *forExpander) forStateFn { return nil } diff --git a/forexpand_test.go b/forexpand_test.go index ee1c24d..8614074 100644 --- a/forexpand_test.go +++ b/forexpand_test.go @@ -8,8 +8,9 @@ import ( ) type forTestCase struct { - input string - output []token + input string + symbols map[string][]token + output []token } func runForExpanderTests(t *testing.T, cases []forTestCase) { @@ -19,7 +20,7 @@ func runForExpanderTests(t *testing.T, cases []forTestCase) { require.NotNil(t, tokens) // scanner := newSymbolScanner(newBufTokenReader(tokens)) - expander := newForExpander(newBufTokenReader(tokens)) + expander := newForExpander(newBufTokenReader(tokens), test.symbols) outTokens, err := expander.Tokens() require.NoError(t, err) require.Equal(t, test.output, outTokens) From 814ba7ffc5893bce4c3efcf7168a8b6a3515e744 Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Sat, 30 Nov 2024 09:16:55 -0600 Subject: [PATCH 7/8] 
forExpander: emit for content with count variables substituted --- forexpand.go | 114 ++++++++++++++++++++++++++++++++++++++++++---- forexpand_test.go | 26 ++++++++++- 2 files changed, 129 insertions(+), 11 deletions(-) diff --git a/forexpand.go b/forexpand.go index 3447b9f..a82c90a 100644 --- a/forexpand.go +++ b/forexpand.go @@ -15,9 +15,11 @@ type forExpander struct { atEOF bool // for state fields - forCount int - forIndex int - forContent []token + forCountLabel string + forLineLabels []string + forCount int + forIndex int + forContent []token symbols map[string][]token @@ -124,10 +126,10 @@ func forConsumeLabels(f *forExpander) forStateFn { f.exprBuf = make([]token, 0) return forConsumeExpression } else { - return forWriteLabelsConsumeLine + return forWriteLabelsEmitConsumeLine } } else if f.nextToken.IsOp() { - return forWriteLabelsConsumeLine + return forWriteLabelsEmitConsumeLine } else { f.labelBuf = append(f.labelBuf, f.nextToken.val) f.next() @@ -142,9 +144,9 @@ func forConsumeLabels(f *forExpander) forStateFn { } } -// forWriteLabelsConsumeLine writes all the stored labels to the token channel, +// forWriteLabelsEmitConsumeLine writes all the stored labels to the token channel, // emits the current nextToken and returns forConsumeLine -func forWriteLabelsConsumeLine(f *forExpander) forStateFn { +func forWriteLabelsEmitConsumeLine(f *forExpander) forStateFn { for _, label := range f.labelBuf { f.tokens <- token{tokText, label} } @@ -192,7 +194,8 @@ func forConsumeExpression(f *forExpander) forStateFn { } // input: exprBuf from forConsumeExpression -// evaluates count expression and +// evaluates count expression and sets up for state +// always returns forInnerLine or Error func forFor(f *forExpander) forStateFn { expr := make([]token, 0, len(f.exprBuf)) for _, token := range f.exprBuf { @@ -209,23 +212,116 @@ func forFor(f *forExpander) forStateFn { return nil } + if len(f.labelBuf) > 0 { + f.forCountLabel = f.labelBuf[len(f.labelBuf)-1] + if len(f.labelBuf) > 1 { + f.forLineLabels = f.labelBuf[:len(f.labelBuf)-1] + } else { + f.forLineLabels = []string{} + } + } else { + f.forCountLabel = "" + f.forLineLabels = []string{} + } + f.forCount = val f.forIndex = 0 // should not be necessary f.forContent = make([]token, 0) + f.labelBuf = make([]string, 0) return forInnerLine } +// text: forInnerConsumeLabels +// other: forInnerConsumeLine func forInnerLine(f *forExpander) forStateFn { switch f.nextToken.typ { case tokText: return forInnerLabels default: - return nil + // emitconsume line into for buffer + return forInnerEmitConsumeLine } } // this is really just to drop labels before 'rof' func forInnerLabels(f *forExpander) forStateFn { + switch f.nextToken.typ { + case tokText: + if f.nextToken.IsPseudoOp() && strings.ToLower(f.nextToken.val) == "rof" { + // just call to emit the buffer + + return forRof + } else if f.nextToken.IsOp() { + // write labels and op into emit buffer then emitcomsume line + return forInnerEmitLabels + } else { + f.labelBuf = append(f.labelBuf, f.nextToken.val) + f.next() + return forInnerLabels + } + default: + return forInnerEmitLabels + // emit labels and emitconsume line into for buffer + } +} + +func forInnerEmitLabels(f *forExpander) forStateFn { + for _, label := range f.labelBuf { + f.tokens <- token{tokText, label} + } + return forInnerEmitConsumeLine +} + +func forInnerEmitConsumeLine(f *forExpander) forStateFn { + switch f.nextToken.typ { + case tokError: + // TODO + return nil + case tokEOF: + return nil + case tokNewline: + // f.tokens <- 
f.nextToken + f.forContent = append(f.forContent, f.nextToken) + f.next() + return forInnerLine + default: + f.forContent = append(f.forContent, f.nextToken) + f.next() + return forInnerEmitConsumeLine + } +} + +func forRof(f *forExpander) forStateFn { + for f.nextToken.typ != tokNewline { + if f.nextToken.typ == tokEOF || f.nextToken.typ == tokError { + return nil + } + f.next() + } + f.next() + + for i := 1; i <= f.forCount; i++ { + for _, tok := range f.forContent { + if tok.typ == tokText { + if tok.val == f.forCountLabel { + f.tokens <- token{tokNumber, fmt.Sprintf("%d", i)} + } else { + f.tokens <- tok + } + } else { + f.tokens <- tok + } + } + } + + return forEmitConsumeStream +} + +func forEmitConsumeStream(f *forExpander) forStateFn { + for f.nextToken.typ != tokEOF { + f.tokens <- f.nextToken + f.next() + } return nil } diff --git a/forexpand_test.go b/forexpand_test.go index 8614074..92b376f 100644 --- a/forexpand_test.go +++ b/forexpand_test.go @@ -35,16 +35,38 @@ func TestForExpander(t *testing.T) { {tokText, "dat"}, {tokNumber, "0"}, {tokComma, ","}, - {tokText, "1"}, + {tokNumber, "1"}, {tokNewline, ""}, {tokText, "dat"}, {tokNumber, "0"}, {tokComma, ","}, - {tokText, "2"}, + {tokNumber, "2"}, + {tokNewline, ""}, + {tokEOF, ""}, + }, + }, + { + input: "i for 2\ndat 0, i\nrof\ndat 3, 4\n", + output: []token{ + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokNumber, "1"}, + {tokNewline, ""}, + {tokText, "dat"}, + {tokNumber, "0"}, + {tokComma, ","}, + {tokNumber, "2"}, + {tokNewline, ""}, + {tokText, "dat"}, + {tokNumber, "3"}, + {tokComma, ","}, + {tokNumber, "4"}, {tokNewline, ""}, {tokEOF, ""}, }, }, + // no for { input: "test equ 2\ndat 0, test\n", output: []token{ From 758d2c77f2d80aa9571700b9d12c849b6f90248b Mon Sep 17 00:00:00 2001 From: Robert Lowry Date: Sat, 30 Nov 2024 09:30:50 -0600 Subject: [PATCH 8/8] add ForExpand API --- compile.go | 30 +++++++++++++++++------------- forexpand.go | 9 +++++++++ test_files/.gitignore | 4 ++++ 3 files changed, 30 insertions(+), 13 deletions(-) create mode 100644 test_files/.gitignore diff --git a/compile.go b/compile.go index 8bf608a..31dff6a 100644 --- a/compile.go +++ b/compile.go @@ -451,19 +451,23 @@ func CompileWarrior(r io.Reader, config SimulatorConfig) (WarriorData, error) { return WarriorData{}, err } - for { - _, forSeen, err := ScanInput(newBufTokenReader(tokens)) - if err != nil { - return WarriorData{}, fmt.Errorf("symbol scanner: %s", err) - } - if forSeen { - // tokens := ForExpand(newBufTokenReader(tokens), symbols) - // we will just break here for now or else it is an infinite loop - break - } else { - break - } - } + // for { + // symbols, forSeen, err := ScanInput(newBufTokenReader(tokens)) + // if err != nil { + // return WarriorData{}, fmt.Errorf("symbol scanner: %s", err) + // } + // if forSeen { + // expandedTokens, err := ForExpand(newBufTokenReader(tokens), symbols) + // if err != nil { + // return WarriorData{}, fmt.Errorf("for: %s", err) + // } + // tokens = expandedTokens + // // oops the embedded for loops are not implemented + // break + // } else { + // break + // } + // } parser := newParser(newBufTokenReader(tokens)) sourceLines, metadata, err := parser.parse() diff --git a/forexpand.go b/forexpand.go index a82c90a..dae12a3 100644 --- a/forexpand.go +++ b/forexpand.go @@ -39,6 +39,15 @@ func newForExpander(lex tokenReader, symbols map[string][]token) *forExpander { return f } +func ForExpand(lex tokenReader, symbols map[string][]token) ([]token, error) { + expander := 
newForExpander(lex, symbols) + tokens, err := expander.Tokens() + if err != nil { + return nil, err + } + return tokens, nil +} + func (p *forExpander) next() token { if p.atEOF { return token{typ: tokEOF} diff --git a/test_files/.gitignore b/test_files/.gitignore new file mode 100644 index 0000000..a86f18d --- /dev/null +++ b/test_files/.gitignore @@ -0,0 +1,4 @@ +k88/ +k88c/ +k94/ +k94c/
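
Note on wiring the pieces together: patch 1 makes the symbol scanner stop and report forSeen, and patch 8 adds the ForExpand API plus a commented-out loop in CompileWarrior that would re-scan and re-expand until no "for" remains (nested for loops are not implemented yet, which is why the series leaves the loop disabled). Below is a minimal sketch, not part of the patch series, of how those passes might be chained once nesting works. The helper name preprocessForLoops and the maxPasses guard are hypothetical; ScanInput, ForExpand, LexInput, newBufTokenReader, and the token type are the identifiers the patches introduce or use.

    package gmars

    import (
        "fmt"
        "io"
    )

    // preprocessForLoops is a sketch of the preprocessing pass hinted at in
    // patches 1 and 8: scan symbols, and if a "for" was seen, expand it and
    // rescan, repeating until the token stream contains no more for loops.
    // maxPasses is a made-up safety limit; the patches themselves simply
    // break out of the loop for now to avoid spinning forever.
    func preprocessForLoops(r io.Reader) ([]token, error) {
        tokens, err := LexInput(r)
        if err != nil {
            return nil, err
        }

        const maxPasses = 64 // hypothetical guard against unterminated nesting
        for i := 0; i < maxPasses; i++ {
            symbols, forSeen, err := ScanInput(newBufTokenReader(tokens))
            if err != nil {
                return nil, fmt.Errorf("symbol scanner: %s", err)
            }
            if !forSeen {
                // nothing left to expand; the caller can hand tokens to newParser
                return tokens, nil
            }
            expanded, err := ForExpand(newBufTokenReader(tokens), symbols)
            if err != nil {
                return nil, fmt.Errorf("for expansion: %s", err)
            }
            tokens = expanded
        }
        return nil, fmt.Errorf("for loops nested deeper than %d passes", maxPasses)
    }

The repeated ScanInput call is what makes nesting tractable in this design: each expansion pass rewrites the outermost for/rof blocks into plain lines, so a fresh scan of the resulting stream either finds another "for" (an inner loop exposed by the previous pass) or reports forSeen as false and lets the parser take over.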