delayed newline counting

Fix a lot of corner cases for correct newline counting, but not
the ones where the closing brace is alone on its own line.
Miek Gieben 2012-11-21 22:21:40 +01:00
parent c15d4fe3a3
commit 5a3db7b240
3 changed files with 17 additions and 11 deletions
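The idea behind the fix, in isolation: when the scanner reads a '\n' it only remembers that fact, and the line counter is bumped when the next byte is handed out. An error reported for the last token on a line therefore still carries that line's number instead of the next one. Below is a minimal, self-contained sketch of that technique; sketchScanner, position and next are illustrative names for this example, not the library's actual types.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

type position struct{ Line, Column int }

// sketchScanner is a stripped-down byte scanner that counts newlines lazily.
type sketchScanner struct {
	src   *bufio.Reader
	pos   position
	sawNL bool // a '\n' was read but not yet counted
}

func newSketchScanner(r io.Reader) *sketchScanner {
	return &sketchScanner{src: bufio.NewReader(r), pos: position{Line: 1}}
}

// next returns one byte; the line counter is only bumped when the byte
// after a newline is delivered, so the last token on a line keeps that
// line's number.
func (s *sketchScanner) next() (byte, error) {
	c, err := s.src.ReadByte()
	if err != nil {
		return c, err
	}
	if s.sawNL {
		s.pos.Line++
		s.pos.Column = 0
		s.sawNL = false
	}
	if c == '\n' {
		s.sawNL = true
		return c, nil
	}
	s.pos.Column++
	return c, nil
}

func main() {
	s := newSketchScanner(strings.NewReader("foo\nbar"))
	for {
		c, err := s.next()
		if err != nil {
			break
		}
		fmt.Printf("%q line %d column %d\n", c, s.pos.Line, s.pos.Column)
	}
}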

View File

@@ -507,11 +507,17 @@ func TestLineNumberError(t *testing.T) {
// Test with no known RR on the line
func TestLineNumberError2(t *testing.T) {
s := "example.com. 1000 SO master.example.com. admin.example.com. 1 4294967294 4294967293 4294967295 100"
s = "example.com 1000 IN TALINK a.example.com. b.\\@example.com."
s = "example.com 1000 IN TALINK ( a.example.com. b.\\@example.com. )"
s = `example.com 1000 IN TALINK ( a.example.com.
b.\@example.com. )`
s = `example.com 1000 IN TALINK ( a.example.com. b.\@example.com.
)`
_, err := NewRR(s)
if err == nil {
t.Fail()
} else {
// fmt.Printf("%s\n", err.Error())
fmt.Printf("%s\n", err.Error())
}
}
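For illustration, a small hedged usage sketch: it assumes the github.com/miekg/dns import path and, like the test above, that the b.\@example.com. owner name makes NewRR fail. With delayed newline counting, the "at line" part of the resulting ParseError should point at the line the offending token is actually on.

package main

import (
	"fmt"

	"github.com/miekg/dns"
)

func main() {
	// Multi-line record taken from the test above; as in the test, the
	// b.\@example.com. owner name is expected to make NewRR fail.
	s := `example.com 1000 IN TALINK ( a.example.com.
b.\@example.com. )`
	if _, err := dns.NewRR(s); err != nil {
		// The message ends in "at line: L:C"; with delayed newline
		// counting L should be the line the bad token is on.
		fmt.Println(err)
	}
}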

View File

@@ -11,7 +11,7 @@ import (
type scan struct {
src *bufio.Reader
position scanner.Position
eof int // have we just seen an EOF (0 no, 1 yes)
eof bool // Have we just seen a eof
}
func scanInit(r io.Reader) *scan {
@@ -27,11 +27,16 @@ func (s *scan) tokenText() (byte, error) {
if err != nil {
return c, err
}
s.eof = 0
if c == '\n' {
// delay the newline handling until the next token is delivered,
// fixes off-by-one errors when reporting a parse error.
if s.eof == true {
s.position.Line++
s.position.Column = 0
s.eof = 1
s.eof = false
}
if c == '\n' {
s.eof = true
return c, nil
}
s.position.Column++
return c, nil
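A consequence visible in the next file: because the scanner itself now delays the increment, the old "- e.lex.eof" line-number correction in ParseError.Error and the eof field that the lexer copied into every lex token can simply be dropped, as the hunks below show.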

View File

@@ -72,11 +72,8 @@ func (e *ParseError) Error() (s string) {
if e.file != "" {
s = e.file + ": "
}
// the -e.lex.eof is used for a file line number correction. The error
// we are printing happend on the line N, but the tokenizer already
// saw the \n and incremented the linenumber counter
s += "dns: " + e.err + ": " + strconv.QuoteToASCII(e.lex.token) + " at line: " +
strconv.Itoa(e.lex.line-e.lex.eof) + ":" + strconv.Itoa(e.lex.column)
strconv.Itoa(e.lex.line) + ":" + strconv.Itoa(e.lex.column)
return
}
@@ -86,7 +83,6 @@ type lex struct {
value uint8 // Value: _STRING, _BLANK, etc.
line int // Line in the file
column int // Column in the file
eof int // Has the tokenizer just seen a newline (0 no, 1 yes)
torc uint16 // Type or class as parsed in the lexer, we only need to look this up in the grammar
}
@@ -451,7 +447,6 @@ func zlexer(s *scan, c chan lex) {
for err == nil {
l.column = s.position.Column
l.line = s.position.Line
l.eof = s.eof
if stri > maxTok {
l.token = "tok length insufficient for parsing"
l.err = true