diff --git a/parse_test.go b/parse_test.go index ceedd4b4..bb3abca5 100644 --- a/parse_test.go +++ b/parse_test.go @@ -507,11 +507,17 @@ func TestLineNumberError(t *testing.T) { // Test with no known RR on the line func TestLineNumberError2(t *testing.T) { s := "example.com. 1000 SO master.example.com. admin.example.com. 1 4294967294 4294967293 4294967295 100" + s = "example.com 1000 IN TALINK a.example.com. b.\\@example.com." + s = "example.com 1000 IN TALINK ( a.example.com. b.\\@example.com. )" + s = `example.com 1000 IN TALINK ( a.example.com. + b.\@example.com. )` + s = `example.com 1000 IN TALINK ( a.example.com. b.\@example.com. + )` _, err := NewRR(s) if err == nil { t.Fail() } else { - // fmt.Printf("%s\n", err.Error()) + fmt.Printf("%s\n", err.Error()) } } diff --git a/scanner.go b/scanner.go index 967b152a..c29bc2f3 100644 --- a/scanner.go +++ b/scanner.go @@ -11,7 +11,7 @@ import ( type scan struct { src *bufio.Reader position scanner.Position - eof int // have we just seen an EOF (0 no, 1 yes) + eof bool // Have we just seen an EOF } func scanInit(r io.Reader) *scan { @@ -27,11 +27,16 @@ func (s *scan) tokenText() (byte, error) { if err != nil { return c, err } - s.eof = 0 - if c == '\n' { + // delay the newline handling until the next token is delivered, + // fixes off-by-one errors when reporting a parse error. + if s.eof == true { s.position.Line++ s.position.Column = 0 - s.eof = 1 + s.eof = false + } + if c == '\n' { + s.eof = true + return c, nil } s.position.Column++ return c, nil } diff --git a/zscan.go b/zscan.go index 8a6b678c..1b8a4cd6 100644 --- a/zscan.go +++ b/zscan.go @@ -72,11 +72,8 @@ func (e *ParseError) Error() (s string) { if e.file != "" { s = e.file + ": " } - // the -e.lex.eof is used for a file line number correction. 
The error - // we are printing happend on the line N, but the tokenizer already - // saw the \n and incremented the linenumber counter s += "dns: " + e.err + ": " + strconv.QuoteToASCII(e.lex.token) + " at line: " + - strconv.Itoa(e.lex.line-e.lex.eof) + ":" + strconv.Itoa(e.lex.column) + strconv.Itoa(e.lex.line) + ":" + strconv.Itoa(e.lex.column) return } @@ -86,7 +83,6 @@ type lex struct { value uint8 // Value: _STRING, _BLANK, etc. line int // Line in the file column int // Column in the file - eof int // Has the tokenizer just seen a newline (0 no, 1 yes) torc uint16 // Type or class as parsed in the lexer, we only need to look this up in the grammar } @@ -451,7 +447,6 @@ func zlexer(s *scan, c chan lex) { for err == nil { l.column = s.position.Column l.line = s.position.Line - l.eof = s.eof if stri > maxTok { l.token = "tok length insufficient for parsing" l.err = true