Diffstat (limited to 'vendor/github.com/pelletier/go-toml/lexer.go')
-rw-r--r--  vendor/github.com/pelletier/go-toml/lexer.go | 88
1 file changed, 41 insertions(+), 47 deletions(-)
diff --git a/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/github.com/pelletier/go-toml/lexer.go
index 104f3b1..1b6647d 100644
--- a/vendor/github.com/pelletier/go-toml/lexer.go
+++ b/vendor/github.com/pelletier/go-toml/lexer.go
@@ -6,14 +6,12 @@
package toml
import (
+ "bytes"
"errors"
"fmt"
- "io"
"regexp"
"strconv"
"strings"
-
- "github.com/pelletier/go-buffruneio"
)
var dateRegexp *regexp.Regexp
@@ -23,29 +21,29 @@ type tomlLexStateFn func() tomlLexStateFn
// Define lexer
type tomlLexer struct {
- input *buffruneio.Reader // Textual source
- buffer []rune // Runes composing the current token
- tokens chan token
- depth int
- line int
- col int
- endbufferLine int
- endbufferCol int
+ inputIdx int
+ input []rune // Textual source
+ currentTokenStart int
+ currentTokenStop int
+ tokens []token
+ depth int
+ line int
+ col int
+ endbufferLine int
+ endbufferCol int
}
// Basic read operations on input
func (l *tomlLexer) read() rune {
- r, _, err := l.input.ReadRune()
- if err != nil {
- panic(err)
- }
+ r := l.peek()
if r == '\n' {
l.endbufferLine++
l.endbufferCol = 1
} else {
l.endbufferCol++
}
+ l.inputIdx++
return r
}
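
The rewrite drops buffruneio entirely: peek becomes a bounds-checked index into the rune slice, and read advances the cursor while tracking line and column. A minimal standalone sketch of that cursor model (names mirror the diff; the eof sentinel is assumed to be a negative rune, as go-toml defines elsewhere in the package):

    // Sketch only: mirrors the new index-based read/peek, not the full lexer.
    package main

    import "fmt"

    const eof rune = -1 // assumed sentinel; go-toml defines its own

    type cursor struct {
        idx       int
        input     []rune
        line, col int
    }

    func (c *cursor) peek() rune {
        if c.idx >= len(c.input) {
            return eof // end of slice is end of input; no error to handle
        }
        return c.input[c.idx]
    }

    func (c *cursor) read() rune {
        r := c.peek()
        if r == '\n' {
            c.line++
            c.col = 1
        } else {
            c.col++
        }
        c.idx++
        return r
    }

    func main() {
        c := &cursor{input: []rune("a=1\nb"), line: 1, col: 1}
        for r := c.read(); r != eof; r = c.read() {
            fmt.Printf("%q -> now at %d:%d\n", r, c.line, c.col)
        }
    }

Note that read can no longer fail: where the old code had to panic on a ReadRune error, running off the end of a slice simply yields eof.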
@@ -53,13 +51,13 @@ func (l *tomlLexer) next() rune {
r := l.read()
if r != eof {
- l.buffer = append(l.buffer, r)
+ l.currentTokenStop++
}
return r
}
func (l *tomlLexer) ignore() {
- l.buffer = make([]rune, 0)
+ l.currentTokenStart = l.currentTokenStop
l.line = l.endbufferLine
l.col = l.endbufferCol
}
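
With the rune buffer gone, the current token is just the half-open span input[currentTokenStart:currentTokenStop): next grows the span, ignore collapses it to its end point, and emit (in the next hunk) stringifies it. A compact, self-contained illustration of that bookkeeping:

    // Sketch only: span bookkeeping over a rune slice.
    package main

    import "fmt"

    func main() {
        input := []rune("key = value")
        start, stop := 0, 0

        for stop < len(input) && input[stop] != ' ' {
            stop++ // next(): extend the current token span
        }
        fmt.Println(string(input[start:stop])) // emit(): prints "key"

        start = stop             // ignore(): drop the span without emitting
        fmt.Println(start, stop) // both 3: the span is empty again
    }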
@@ -76,49 +74,46 @@ func (l *tomlLexer) fastForward(n int) {
}
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
- l.tokens <- token{
+ l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: t,
val: value,
- }
+ })
l.ignore()
}
func (l *tomlLexer) emit(t tokenType) {
- l.emitWithValue(t, string(l.buffer))
+ l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
}
func (l *tomlLexer) peek() rune {
- r, _, err := l.input.ReadRune()
- if err != nil {
- panic(err)
+ if l.inputIdx >= len(l.input) {
+ return eof
}
- l.input.UnreadRune()
- return r
+ return l.input[l.inputIdx]
}
-func (l *tomlLexer) follow(next string) bool {
- for _, expectedRune := range next {
- r, _, err := l.input.ReadRune()
- defer l.input.UnreadRune()
- if err != nil {
- panic(err)
- }
- if expectedRune != r {
- return false
- }
+func (l *tomlLexer) peekString(size int) string {
+ maxIdx := len(l.input)
+ upperIdx := l.inputIdx + size // FIXME: potential overflow
+ if upperIdx > maxIdx {
+ upperIdx = maxIdx
}
- return true
+ return string(l.input[l.inputIdx:upperIdx])
+}
+
+func (l *tomlLexer) follow(next string) bool {
+ return next == l.peekString(len(next))
}
// Error management
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
- l.tokens <- token{
+ l.tokens = append(l.tokens, token{
Position: Position{l.line, l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
- }
+ })
return nil
}
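
peekString clamps its upper bound to the input length, so follow reduces to a single string comparison instead of the old rune-by-rune loop with deferred unreads. One subtlety worth flagging: len(next) counts bytes while peekString counts runes, which only agree for ASCII arguments — fine for the keywords and punctuation follow is actually called with. A self-contained sketch:

    // Sketch only: clamped lookahead over a rune slice.
    package main

    import "fmt"

    func peekString(input []rune, idx, size int) string {
        upper := idx + size // the diff flags this sum as a potential overflow
        if upper > len(input) {
            upper = len(input)
        }
        return string(input[idx:upper])
    }

    func follow(input []rune, idx int, next string) bool {
        return next == peekString(input, idx, len(next)) // len(next) is bytes: safe for ASCII
    }

    func main() {
        in := []rune("true]")
        fmt.Println(follow(in, 0, "true"))  // true
        fmt.Println(follow(in, 0, "false")) // false
        fmt.Println(peekString(in, 3, 10))  // "e]", clamped at end of input
    }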
@@ -219,7 +214,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
break
}
- possibleDate := string(l.input.PeekRunes(35))
+ possibleDate := l.peekString(35)
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
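
The 35-rune window is presumably sized for the longest RFC 3339 timestamp form TOML accepts (nanosecond precision plus a numeric UTC offset). A quick sanity check — the string is ASCII, so byte and rune counts agree:

    fmt.Println(len("2006-01-02T15:04:05.999999999-07:00")) // prints 35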
@@ -536,7 +531,7 @@ func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
- if len(l.buffer) > 0 {
+ if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroupArray)
}
l.next()
@@ -559,7 +554,7 @@ func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
- if len(l.buffer) > 0 {
+ if l.currentTokenStop > l.currentTokenStart {
l.emit(tokenKeyGroup)
}
l.next()
@@ -634,7 +629,6 @@ func (l *tomlLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
- close(l.tokens)
}
func init() {
@@ -642,16 +636,16 @@ func init() {
}
// Entry point
-func lexToml(input io.Reader) chan token {
- bufferedInput := buffruneio.NewReader(input)
+func lexToml(inputBytes []byte) []token {
+ runes := bytes.Runes(inputBytes)
l := &tomlLexer{
- input: bufferedInput,
- tokens: make(chan token),
+ input: runes,
+ tokens: make([]token, 0, 256),
line: 1,
col: 1,
endbufferLine: 1,
endbufferCol: 1,
}
- go l.run()
+ l.run()
return l.tokens
}
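
With both the channel and the goroutine gone, lexing is a plain synchronous call: decode the whole input to runes up front, run the state machine to completion, and return the materialized token slice. A hedged usage sketch from inside the package (lexToml and token are unexported; the typ, val, and Position fields are taken from the diff context):

    // Sketch only: iterating the returned slice instead of ranging over a channel.
    tokens := lexToml([]byte("answer = 42"))
    for _, tok := range tokens {
        if tok.typ == tokenError {
            fmt.Printf("lex error at %v: %s\n", tok.Position, tok.val)
            break
        }
        fmt.Printf("%v %q\n", tok.Position, tok.val)
    }

Materializing the slice trades memory (the whole input and token stream stay resident) for the removal of per-token channel synchronization, and it lets the parser index and backtrack freely rather than consuming tokens strictly in order.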