From 487e60056273c3b96d956dac843940ad987316e5 Mon Sep 17 00:00:00 2001
From: Miek Gieben
Date: Sat, 17 Dec 2011 11:28:54 +0100
Subject: [PATCH] Fix parsing when RR ends in a brace

* Testcase added
---
 parse_test.go | 46 ++++++++++++++++++++++++++++++++++++++++------
 zscan.go      | 43 +++++++++++++++++++++++++++++--------------
 2 files changed, 69 insertions(+), 20 deletions(-)

diff --git a/parse_test.go b/parse_test.go
index 6963a10a..0392ca70 100644
--- a/parse_test.go
+++ b/parse_test.go
@@ -110,10 +110,11 @@ func TestParse(t *testing.T) {
 		"dnsex.nl. 86400 IN RRSIG SOA 8 2 86400 20110403154150 20110304154150 23334 dnsex.nl. QN6hwJQLEBqRVKmO2LgkuRSx9bkKIZxXlTVtHg5SaiN+8RCTckGtUXkQ vmZiBt3RdIWAjaabQYpEZHgvyjfy4Wwu/9RPDYnLt/qoyr4QKAdujchc m+fMDSbbcC7AN08i5D/dUWfNOHXjRJLY7t7AYB9DBt32LazIb0EU9QiW 5Cg=": "dnsex.nl.\t86400\tIN\tRRSIG\tSOA 8 2 86400 20110403154150 20110304154150 23334 dnsex.nl. QN6hwJQLEBqRVKmO2LgkuRSx9bkKIZxXlTVtHg5SaiN+8RCTckGtUXkQvmZiBt3RdIWAjaabQYpEZHgvyjfy4Wwu/9RPDYnLt/qoyr4QKAdujchcm+fMDSbbcC7AN08i5D/dUWfNOHXjRJLY7t7AYB9DBt32LazIb0EU9QiW5Cg=",
 	}
 	for i, o := range tests {
-		rr, _ := NewRR(i)
-		if rr == nil {
+		rr, e := NewRR(i)
+		if e != nil {
 			t.Log("Failed to parse RR")
 			t.Fail()
+			continue
 		}
 		if rr.String() != o {
 			t.Logf("`%s' should be equal to\n`%s', but is `%s'\n", i, o, rr.String())
@@ -124,13 +125,17 @@ func TestParse(t *testing.T) {
 	}
 }
 
-func TestParseBraces(t *testing.T) {
+func TestParseBrace(t *testing.T) {
 	tests := map[string]string{
 		"(miek.nl.) 3600 IN A 127.0.0.1": "miek.nl.\t3600\tIN\tA\t127.0.0.1",
 		"miek.nl. (3600) IN MX (10) elektron.atoom.net.": "miek.nl.\t3600\tIN\tMX\t10 elektron.atoom.net.",
 		`miek.nl. IN (
 	3600 A 127.0.0.1)`: "miek.nl.\t3600\tIN\tA\t127.0.0.1",
 		"(miek.nl.) (A) (127.0.0.1)": "miek.nl.\t3600\tIN\tA\t127.0.0.1",
+		`(miek.nl.) (
+	(IN)
+	(AAAA)
+	(::1) )`: "miek.nl.\t3600\tIN\tAAAA\t::1",
 		`(miek.nl.) (
 	(IN)
 	(AAAA)
@@ -145,10 +150,11 @@ func TestParseBraces(t *testing.T) {
 	)`: "miek.nl.\t86400\tIN\tSOA\telektron.atoom.net. miekg.atoom.net. 2009032802 21600 7200 604800 3600",
 	}
 	for i, o := range tests {
-		rr, _ := NewRR(i)
-		if rr == nil {
-			t.Log("Failed to parse RR")
+		rr, e := NewRR(i)
+		if e != nil {
+			t.Log("Failed to parse RR: " + e.Error())
 			t.Fail()
+			continue
 		}
 		if rr.String() != o {
 			t.Logf("`%s' should be equal to\n`%s', but is `%s'\n", i, o, rr.String())
@@ -159,6 +165,34 @@ func TestParseBraces(t *testing.T) {
 	}
 }
 
+/*
+func TestLexerBrace(t *testing.T) {
+	aaaa := `(miek.nl.) (
+	(IN)
+	(AAAA)
+	::1)`
+
+//	aaaa = `miek.nl. (
+//	IN
+//	AAAA
+//	::1 ))`
+//
+	var s scanner.Scanner
+	c := make(chan lex)
+	s.Init(strings.NewReader(aaaa))
+	s.Mode = 0
+	s.Whitespace = 0
+	go zlexer(s, c)
+	for l := range c {
+		if l.err != "" {
+			t.Logf("E: %s\n", l.err)
+			continue
+		}
+		t.Logf("%s ", l)
+	}
+}
+*/
+
 func TestParseFailure(t *testing.T) {
 	tests := []string{"miek.nl. IN A 327.0.0.1",
 		"miek.nl. IN AAAA ::x",
diff --git a/zscan.go b/zscan.go
index 594f6c05..8a6badfa 100644
--- a/zscan.go
+++ b/zscan.go
@@ -59,10 +59,11 @@ func (e *ParseError) Error() string {
 }
 
 type lex struct {
-	token  string // text of the token
-	value  int    // value: _STRING, _BLANK, etc.
-	line   int    // line in the file
-	column int    // column in the fil
+	token  string // Text of the token
+	err    string // Error text when the lexer detects it. Not used by the grammar
+	value  int    // Value: _STRING, _BLANK, etc.
+	line   int    // Line in the file
+	column int    // Column in the file
 }
 
 type Token struct {
@@ -117,6 +118,12 @@ func ParseZone(r io.Reader, t chan Token) {
 		if _DEBUG {
 			fmt.Printf("[%v]\n", l)
 		}
+		// Lexer spotted an error already
+		if l.err != "" {
+			t <- Token{Error: &ParseError{l.err, l}}
+			return
+
+		}
 		switch st {
 		case _EXPECT_OWNER:
 			// Set the defaults here
@@ -154,7 +161,7 @@ func ParseZone(r io.Reader, t chan Token) {
 			case _STRING: // TTL is this case
 				ttl, ok := strconv.Atoi(l.token)
 				if ok != nil {
-					t <- Token{Error: &ParseError{"Not a TTL", l}}
+					t <- Token{Error: &ParseError{"Ownername seen, not a TTL", l}}
 					return
 				} else {
 					h.Ttl = uint32(ttl)
@@ -194,7 +201,7 @@ func ParseZone(r io.Reader, t chan Token) {
 			case _STRING: // TTL
 				ttl, ok := strconv.Atoi(l.token)
 				if ok != nil {
-					t <- Token{Error: &ParseError{"Not a TTL", l}}
+					t <- Token{Error: &ParseError{"Class seen, not a TTL", l}}
 					return
 				} else {
 					h.Ttl = uint32(ttl)
@@ -241,17 +248,17 @@ func ParseZone(r io.Reader, t chan Token) {
 func (l lex) String() string {
 	switch l.value {
 	case _STRING:
-		return l.token
+		return "S:" + l.token + "$"
 	case _BLANK:
-		return " " //"_" // seems to work, make then invisible for now
+		return "_"
 	case _NEWLINE:
 		return "|\n"
 	case _RRTYPE:
-		return "R:" + l.token
+		return "R:" + l.token + "$"
 	case _OWNER:
-		return "O:" + l.token
+		return "O:" + l.token + "$"
 	case _CLASS:
-		return "C:" + l.token
+		return "C:" + l.token + "$"
 	}
 	return ""
 }
@@ -291,7 +298,7 @@ func zlexer(s scanner.Scanner, c chan lex) {
 				if !rrtype {
 					if _, ok := Str_rr[strings.ToUpper(l.token)]; ok {
 						l.value = _RRTYPE
-						rrtype = true // We've seen one
+						rrtype = true
 					}
 					if _, ok := Str_class[strings.ToUpper(l.token)]; ok {
 						l.value = _CLASS
@@ -305,8 +312,8 @@ func zlexer(s scanner.Scanner, c chan lex) {
 					l.token = " "
 					c <- l
 				}
-				space = true
 				owner = false
+				space = true
 			case ";":
 				if quote {
 					// Inside quoted text we allow ;
@@ -325,6 +332,12 @@ func zlexer(s scanner.Scanner, c chan lex) {
 			if str != "" {
 				l.value = _STRING
 				l.token = str
+				if !rrtype {
+					if _, ok := Str_rr[strings.ToUpper(l.token)]; ok {
+						l.value = _RRTYPE
+						rrtype = true
+					}
+				}
 				c <- l
 			}
 			if brace > 0 {
@@ -363,7 +376,9 @@ func zlexer(s scanner.Scanner, c chan lex) {
 			}
 			brace--
 			if brace < 0 {
-				fmt.Printf("%s\n", &ParseError{"Extra closing brace", l})
+				l.err = "Extra closing brace"
+				c <- l
+				return
 			}
 		default:
 			if commt {
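Usage sketch (not part of the patch): the snippet below shows what the change means for a caller of NewRR, mirroring the new TestParseBrace case and the new lexer error path. The import path and the exact wording of the error returned by NewRR are assumptions based on the hunks above, not verified against the full tree.

package main

import (
	"fmt"

	"github.com/miekg/dns" // assumed import path for the package patched above
)

func main() {
	// An RR whose text ends in a closing brace, the case this patch fixes.
	// Mirrors the new TestParseBrace entry; the 3600 TTL matches the defaults
	// expected by the existing tests.
	rr, err := dns.NewRR("(miek.nl.) ( (IN) (AAAA) (::1) )")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(rr) // expected: miek.nl.	3600	IN	AAAA	::1

	// An unbalanced ")" should now come back as a parse error ("Extra closing
	// brace") instead of only being printed from inside the lexer.
	if _, err := dns.NewRR("miek.nl. 3600 IN A 127.0.0.1 )"); err != nil {
		fmt.Println("expected error:", err)
	}
}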