diff --git a/lex.go b/lex.go index 113b0d3..dcf9fd7 100644 --- a/lex.go +++ b/lex.go @@ -176,8 +176,13 @@ func lexInput(l *lexer) lexStateFn { case '\x1a': return l.consume(lexInput) default: + // we will put this in the stream. if a file is formatted + // properly, any invalid input should be after an 'end' + // pseudo-op which will cause the parser to stop before + // processing this token, otherwise it is an error l.tokens <- token{tokInvalid, string(l.nextRune)} - return l.consume(lexInput) + l.tokens <- token{typ: tokEOF} + return nil } return nil diff --git a/symbol_scanner.go b/symbol_scanner.go index 1c0459d..89eef1b 100644 --- a/symbol_scanner.go +++ b/symbol_scanner.go @@ -14,7 +14,6 @@ type symbolScanner struct { nextToken token atEOF bool - endSeen bool valBuf []token labelBuf []string forLevel int @@ -109,7 +108,6 @@ func scanLabels(p *symbolScanner) scanStateFn { } return scanConsumeLine case "end": - p.endSeen = true if p.forLevel > 1 { return scanConsumeLine } else { @@ -120,13 +118,8 @@ func scanLabels(p *symbolScanner) scanStateFn { } } else if p.nextToken.IsOp() { return scanConsumeLine - } else if p.nextToken.typ == tokError { - if p.endSeen { - return nil - } else { - - return nil - } + } else if p.nextToken.typ == tokInvalid { + return nil } p.labelBuf = append(p.labelBuf, p.nextToken.val) return p.consume(scanLabels) diff --git a/symbol_scanner_test.go b/symbol_scanner_test.go index 5f8c48f..8081ef4 100644 --- a/symbol_scanner_test.go +++ b/symbol_scanner_test.go @@ -54,6 +54,10 @@ func TestSymbolScanner(t *testing.T) { "test": {{tokNumber, "2"}}, }, }, + { + input: "for 1\nend\nrof\n ~", + output: map[string][]token{}, + }, } runSymbolScannerTests(t, tests) }