author    Niall Sheridan <nsheridan@gmail.com>  2017-01-24 23:43:28 +0000
committer Niall Sheridan <nsheridan@gmail.com>  2017-01-25 00:02:37 +0000
commit    f635033e3e953e74d67b76a520c9760786330af5 (patch)
tree      8e41d7d07c489edbfb45f1d4ffff921b2f6bc265 /vendor
parent    0f4344348419ed6c3ee4236188e456d79e2d51b4 (diff)
Switch to scl, an extension of hcl
Diffstat (limited to 'vendor')
-rw-r--r--  vendor/github.com/hashicorp/hcl/hcl/parser/parser.go |   4
-rw-r--r--  vendor/github.com/homemade/scl/LICENSE               |  21
-rw-r--r--  vendor/github.com/homemade/scl/decode.go             |  21
-rw-r--r--  vendor/github.com/homemade/scl/disk_file_system.go   |  53
-rw-r--r--  vendor/github.com/homemade/scl/doc.go                |  37
-rw-r--r--  vendor/github.com/homemade/scl/file_system.go        |  19
-rw-r--r--  vendor/github.com/homemade/scl/glide.lock            |  34
-rw-r--r--  vendor/github.com/homemade/scl/glide.yaml            |  11
-rw-r--r--  vendor/github.com/homemade/scl/parser.go             | 612
-rw-r--r--  vendor/github.com/homemade/scl/readme.markdown       | 105
-rw-r--r--  vendor/github.com/homemade/scl/scanner.go            | 121
-rw-r--r--  vendor/github.com/homemade/scl/scanner_line.go       |  38
-rwxr-xr-x  vendor/github.com/homemade/scl/scope.go              | 277
-rw-r--r--  vendor/github.com/homemade/scl/token.go              |  39
-rw-r--r--  vendor/github.com/homemade/scl/token_string.go       |  16
-rw-r--r--  vendor/github.com/homemade/scl/tokeniser.go          | 279
-rw-r--r--  vendor/vendor.json                                   |  44
17 files changed, 1709 insertions, 22 deletions
diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go
index 476ed04..8dd73e0 100644
--- a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go
+++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go
@@ -346,7 +346,7 @@ func (p *Parser) listType() (*ast.ListType, error) {
}
}
switch tok.Type {
- case token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC:
+ case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC:
node, err := p.literalType()
if err != nil {
return nil, err
@@ -388,8 +388,6 @@ func (p *Parser) listType() (*ast.ListType, error) {
}
l.Add(node)
needComma = true
- case token.BOOL:
- // TODO(arslan) should we support? not supported by HCL yet
case token.LBRACK:
// TODO(arslan) should we support nested lists? Even though it's
// written in README of HCL, it's not a part of the grammar
diff --git a/vendor/github.com/homemade/scl/LICENSE b/vendor/github.com/homemade/scl/LICENSE
new file mode 100644
index 0000000..7dcf9f8
--- /dev/null
+++ b/vendor/github.com/homemade/scl/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 HomeMade
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/homemade/scl/decode.go b/vendor/github.com/homemade/scl/decode.go
new file mode 100644
index 0000000..cb2b49e
--- /dev/null
+++ b/vendor/github.com/homemade/scl/decode.go
@@ -0,0 +1,21 @@
+package scl
+
+import "github.com/hashicorp/hcl"
+
+/*
+DecodeFile reads the given input file and decodes it into the structure given by `out`.
+*/
+func DecodeFile(out interface{}, path string) error {
+
+ parser, err := NewParser(NewDiskSystem())
+
+ if err != nil {
+ return err
+ }
+
+ if err := parser.Parse(path); err != nil {
+ return err
+ }
+
+ return hcl.Decode(out, parser.String())
+}
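A minimal usage sketch for the DecodeFile wrapper added above; the config struct, its hcl tags and the file path are hypothetical, not part of this change:

``` go
package main

import (
	"log"

	"github.com/homemade/scl"
)

type config struct {
	// Hypothetical fields; the hcl tags map SCL/HCL keys to struct fields.
	ListenAddr string `hcl:"listen_addr"`
	Port       int    `hcl:"port"`
}

func main() {
	var cfg config

	// DecodeFile builds a disk-backed parser internally, compiles the SCL
	// file to HCL, and decodes the result into cfg.
	if err := scl.DecodeFile(&cfg, "/etc/example/config.scl"); err != nil {
		log.Fatal(err)
	}

	log.Printf("listening on %s:%d", cfg.ListenAddr, cfg.Port)
}
```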
diff --git a/vendor/github.com/homemade/scl/disk_file_system.go b/vendor/github.com/homemade/scl/disk_file_system.go
new file mode 100644
index 0000000..edd11f0
--- /dev/null
+++ b/vendor/github.com/homemade/scl/disk_file_system.go
@@ -0,0 +1,53 @@
+package scl
+
+import (
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+type diskFileSystem struct {
+ basePath string
+}
+
+/*
+NewDiskSystem creates a filesystem that uses the local disk, at an optional
+base path. The default base path is the current working directory.
+*/
+func NewDiskSystem(basePath ...string) FileSystem {
+
+ base := ""
+
+ if len(basePath) > 0 {
+ base = basePath[0]
+ }
+
+ return &diskFileSystem{base}
+}
+
+func (d *diskFileSystem) path(path string) string {
+ return filepath.Join(d.basePath, strings.TrimPrefix(path, d.basePath))
+}
+
+func (d *diskFileSystem) Glob(pattern string) (out []string, err error) {
+ return filepath.Glob(d.path(pattern))
+}
+
+func (d *diskFileSystem) ReadCloser(path string) (data io.ReadCloser, lastModified time.Time, err error) {
+
+ reader, err := os.Open(d.path(path))
+
+ if err != nil {
+ return nil, time.Time{}, err
+ }
+
+ stat, err := reader.Stat()
+
+ if err != nil {
+ return nil, time.Time{}, err
+ }
+
+ return reader, stat.ModTime(), nil
+}
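A minimal usage sketch for NewDiskSystem added above, assuming a hypothetical /etc/example base directory:

``` go
package main

import (
	"fmt"

	"github.com/homemade/scl"
)

func main() {
	// The base path argument is optional; with no argument the current
	// working directory is used.
	fs := scl.NewDiskSystem("/etc/example") // hypothetical directory

	// Glob resolves the pattern under the base path; each match can be read
	// back via ReadCloser or parsed by a Parser built on the same FileSystem.
	matches, err := fs.Glob("conf.d/*.scl")
	if err != nil {
		panic(err)
	}

	for _, path := range matches {
		fmt.Println(path)
	}
}
```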
diff --git a/vendor/github.com/homemade/scl/doc.go b/vendor/github.com/homemade/scl/doc.go
new file mode 100644
index 0000000..ec34ae2
--- /dev/null
+++ b/vendor/github.com/homemade/scl/doc.go
@@ -0,0 +1,37 @@
+/*
+Package scl is an implementation of a parser for the Sepia Configuration
+Language.
+
+SCL is a simple, declarative, self-documenting, semi-functional language that
+extends HCL (as in https://github.com/hashicorp/hcl) in the same way that Sass
+extends CSS. What that means is, any properly formatted HCL is valid SCL. If
+you really enjoy HCL, you can keep using it exclusively: under the hood, SCL
+‘compiles’ to HCL. The difference is that now you can explicitly include
+files, use ‘mixins’ to quickly inject boilerplate code, and use properly
+scoped, natural variables. The language is designed to accompany Sepia (and,
+specifically, Sepia plugins) but it's a general purpose language, and can be
+used for pretty much any configurational purpose.
+
+Full documentation for the language itself, including a language specification,
+tutorials and examples, is available at https://github.com/homemade/scl/wiki.
+*/
+package scl
+
+/*
+MixinDoc documents a mixin from a particular SCL file. Since mixins can be nested, it
+also includes a tree of all child mixins.
+*/
+type MixinDoc struct {
+ Name string
+ File string
+ Line int
+ Reference string
+ Signature string
+ Docs string
+ Children MixinDocs
+}
+
+/*
+MixinDocs is a slice of MixinDoc structs, for convenience.
+*/
+type MixinDocs []MixinDoc
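A minimal sketch of consuming the MixinDoc/MixinDocs types added above, assuming a hypothetical library.scl file on disk; Documentation() is defined on the Parser added later in this change:

``` go
package main

import (
	"fmt"
	"strings"

	"github.com/homemade/scl"
)

// printMixinDocs walks a MixinDocs tree and prints each mixin's signature and
// accrued documentation, indenting nested (child) mixins.
func printMixinDocs(docs scl.MixinDocs, depth int) {
	for _, d := range docs {
		indent := strings.Repeat("  ", depth)
		fmt.Printf("%s%s (%s)\n", indent, d.Signature, d.Reference)
		if d.Docs != "" {
			fmt.Printf("%s  %s\n", indent, d.Docs)
		}
		printMixinDocs(d.Children, depth+1)
	}
}

func main() {
	parser, err := scl.NewParser(scl.NewDiskSystem())
	if err != nil {
		panic(err)
	}

	// Documentation() covers only the nominated file, unlike String().
	docs, err := parser.Documentation("library.scl") // hypothetical file
	if err != nil {
		panic(err)
	}

	printMixinDocs(docs, 0)
}
```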
diff --git a/vendor/github.com/homemade/scl/file_system.go b/vendor/github.com/homemade/scl/file_system.go
new file mode 100644
index 0000000..959e096
--- /dev/null
+++ b/vendor/github.com/homemade/scl/file_system.go
@@ -0,0 +1,19 @@
+package scl
+
+import (
+ "io"
+ "time"
+)
+
+/*
+A FileSystem is a representation of entities with names and content that can be
+listed using standard glob syntax and read by name. The typical implementation
+for this is a local disk filesystem, but it could be anything – records in a
+database, objects on AWS S3, the contents of a zip file, virtual files stored
+inside a binary, and so forth. A FileSystem is required to instantiate the
+standard Parser implementation.
+*/
+type FileSystem interface {
+ Glob(pattern string) ([]string, error)
+ ReadCloser(path string) (content io.ReadCloser, lastModified time.Time, err error)
+}
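The FileSystem interface above is the parser's only dependency on storage, so swapping in a non-disk backend is a small amount of code. A minimal in-memory sketch, assuming a hypothetical memFileSystem type and using filepath.Match for globbing:

``` go
package main

import (
	"fmt"
	"io"
	"path/filepath"
	"strings"
	"time"

	"github.com/homemade/scl"
)

// memFileSystem is a hypothetical in-memory scl.FileSystem, useful for tests
// or for configuration embedded in a binary.
type memFileSystem struct {
	files map[string]string // path -> content
}

func (m *memFileSystem) Glob(pattern string) ([]string, error) {
	var out []string
	for path := range m.files {
		ok, err := filepath.Match(pattern, path)
		if err != nil {
			return nil, err
		}
		if ok {
			out = append(out, path)
		}
	}
	return out, nil
}

func (m *memFileSystem) ReadCloser(path string) (io.ReadCloser, time.Time, error) {
	content, ok := m.files[path]
	if !ok {
		return nil, time.Time{}, fmt.Errorf("no such file: %s", path)
	}
	return io.NopCloser(strings.NewReader(content)), time.Now(), nil
}

func main() {
	fs := &memFileSystem{files: map[string]string{
		// Plain HCL is valid SCL, so this passes straight through.
		"config.scl": "wrapper {\n  inner = \"yes\"\n}\n",
	}}

	parser, err := scl.NewParser(fs)
	if err != nil {
		panic(err)
	}

	if err := parser.Parse("config.scl"); err != nil {
		panic(err)
	}

	fmt.Println(parser.String())
}
```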
diff --git a/vendor/github.com/homemade/scl/glide.lock b/vendor/github.com/homemade/scl/glide.lock
new file mode 100644
index 0000000..cc698fe
--- /dev/null
+++ b/vendor/github.com/homemade/scl/glide.lock
@@ -0,0 +1,34 @@
+hash: a63f3be588fdde1c135bba818644df041f3b39f773997e405f297237e78f1663
+updated: 2016-11-08T15:18:15.308059681Z
+imports:
+- name: github.com/aryann/difflib
+ version: 035af7c09b120b0909dd998c92745b82f61e0b1c
+- name: github.com/hashicorp/hcl
+ version: 6f5bfed9a0a22222fbe4e731ae3481730ba41e93
+ subpackages:
+ - hcl/ast
+ - hcl/parser
+ - hcl/scanner
+ - hcl/strconv
+ - hcl/token
+ - json/parser
+ - json/scanner
+ - json/token
+- name: github.com/Masterminds/vcs
+ version: cff893e7f9fc3999fe4f1f50f5b504beb67e1164
+- name: github.com/tucnak/climax
+ version: 4c021a579ddac03b8a085bebcb87d66c072341ef
+testImports:
+- name: github.com/davecgh/go-spew
+ version: 6d212800a42e8ab5c146b8ace3490ee17e5225f9
+ subpackages:
+ - spew
+- name: github.com/pmezard/go-difflib
+ version: d8ed2627bdf02c080bf22230dbb337003b7aba2d
+ subpackages:
+ - difflib
+- name: github.com/stretchr/testify
+ version: 69483b4bd14f5845b5a1e55bca19e954e827f1d0
+ subpackages:
+ - assert
+ - require
diff --git a/vendor/github.com/homemade/scl/glide.yaml b/vendor/github.com/homemade/scl/glide.yaml
new file mode 100644
index 0000000..dfeb476
--- /dev/null
+++ b/vendor/github.com/homemade/scl/glide.yaml
@@ -0,0 +1,11 @@
+package: github.com/homemade/scl
+import:
+- package: github.com/hashicorp/hcl
+ subpackages:
+ - hcl/parser
+testImport:
+- package: github.com/stretchr/testify
+ version: ~1.1.3
+ subpackages:
+ - assert
+ - require
diff --git a/vendor/github.com/homemade/scl/parser.go b/vendor/github.com/homemade/scl/parser.go
new file mode 100644
index 0000000..0304a00
--- /dev/null
+++ b/vendor/github.com/homemade/scl/parser.go
@@ -0,0 +1,612 @@
+package scl
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/hashicorp/hcl"
+ hclparser "github.com/hashicorp/hcl/hcl/parser"
+)
+
+const (
+ builtinMixinBody = "__body__"
+ builtinMixinInclude = "include"
+ hclIndentSize = 2
+ noMixinParamValue = "_"
+)
+
+/*
+A Parser takes input in the form of filenames, variable values and include
+paths, and transforms any SCL into HCL. Generally, a program will only call
+Parse() for one file (the configuration file for that project) but it can be
+called on any number of files, each of which will add to the Parser's HCL
+output.
+
+Variables and include paths are global for all files parsed; that is, if you
+Parse() multiple files, each of them will have access to the same set of
+variables and use the same set of include paths. The parser variables are part
+of the top-level scope: if a file changes them while it's being parsed, the
+next file will have the same variable available with the changed value.
+Similarly, if a file declares a new variable or mixin on the root scope, then
+the next file will be able to access it. This can become confusing quickly,
+so it's usually best to parse only one file and let it explicitly include
+any other files at the SCL level.
+
+SCL is an auto-documenting language, and the documentation is obtained using
+the Parser's Documentation() function. Only mixins are currently documented.
+Unlike the String() function, the documentation returned by Documentation()
+only includes the nominated file.
+*/
+type Parser interface {
+ Parse(fileName string) error
+ Documentation(fileName string) (MixinDocs, error)
+ SetParam(name, value string)
+ AddIncludePath(name string)
+ String() string
+}
+
+type parser struct {
+ fs FileSystem
+ rootScope *scope
+ output []string
+ indent int
+ includePaths []string
+}
+
+/*
+NewParser creates a new, standard Parser given a FileSystem. The most common FileSystem is
+the DiskFileSystem, but any will do. The parser opens all files and reads all
+includes using the FileSystem provided.
+*/
+func NewParser(fs FileSystem) (Parser, error) {
+
+ p := &parser{
+ fs: fs,
+ rootScope: newScope(),
+ }
+
+ return p, nil
+}
+
+func (p *parser) SetParam(name, value string) {
+ p.rootScope.setVariable(name, value)
+}
+
+func (p *parser) AddIncludePath(name string) {
+ p.includePaths = append(p.includePaths, name)
+}
+
+func (p *parser) String() string {
+ return strings.Join(p.output, "\n")
+}
+
+func (p *parser) Parse(fileName string) error {
+
+ lines, err := p.scanFile(fileName)
+
+ if err != nil {
+ return err
+ }
+
+ if err := p.parseTree(lines, newTokeniser(), p.rootScope); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (p *parser) Documentation(fileName string) (MixinDocs, error) {
+
+ docs := MixinDocs{}
+
+ lines, err := p.scanFile(fileName)
+
+ if err != nil {
+ return docs, err
+ }
+
+ if err := p.parseTreeForDocumentation(lines, newTokeniser(), &docs); err != nil {
+ return docs, err
+ }
+
+ return docs, nil
+}
+
+func (p *parser) scanFile(fileName string) (lines scannerTree, err error) {
+
+ f, _, err := p.fs.ReadCloser(fileName)
+
+ if err != nil {
+ return lines, fmt.Errorf("Can't read %s: %s", fileName, err)
+ }
+
+ defer f.Close()
+
+ lines, err = newScanner(f, fileName).scan()
+
+ if err != nil {
+ return lines, fmt.Errorf("Can't scan %s: %s", fileName, err)
+ }
+
+ return
+}
+
+func (p *parser) isValid(hclString string) error {
+
+ e := hcl.Decode(&struct{}{}, hclString)
+
+ if pe, ok := e.(*hclparser.PosError); ok {
+ return pe.Err
+ } else if e != nil {
+ return e
+ }
+
+ return nil
+}
+
+func (p *parser) indentedValue(literal string) string {
+ return fmt.Sprintf("%s%s", strings.Repeat(" ", p.indent*hclIndentSize), literal)
+}
+
+func (p *parser) writeLiteralToOutput(scope *scope, literal string, block bool) error {
+
+ literal, err := scope.interpolateLiteral(literal)
+
+ if err != nil {
+ return err
+ }
+
+ line := p.indentedValue(literal)
+
+ if block {
+
+ if err := p.isValid(line + "{}"); err != nil {
+ return err
+ }
+
+ line += " {"
+ p.indent++
+
+ } else {
+
+ if hashCommentMatcher.MatchString(line) {
+ // Comments are passed through directly
+ } else if err := p.isValid(line + "{}"); err == nil {
+ line = line + "{}"
+ } else if err := p.isValid(line); err != nil {
+ return err
+ }
+ }
+
+ p.output = append(p.output, line)
+
+ return nil
+}
+
+func (p *parser) endBlock() {
+ p.indent--
+ p.output = append(p.output, p.indentedValue("}"))
+}
+
+func (p *parser) err(branch *scannerLine, e string, args ...interface{}) error {
+ return fmt.Errorf("[%s] %s", branch.String(), fmt.Sprintf(e, args...))
+}
+
+func (p *parser) parseTree(tree scannerTree, tkn *tokeniser, scope *scope) error {
+
+ for _, branch := range tree {
+
+ tokens, err := tkn.tokenise(branch)
+
+ if err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ if len(tokens) > 0 {
+
+ token := tokens[0]
+
+ switch token.kind {
+
+ case tokenLiteral:
+
+ if err := p.parseLiteral(branch, tkn, token, scope); err != nil {
+ return err
+ }
+
+ case tokenVariableAssignment:
+
+ value, err := scope.interpolateLiteral(tokens[1].content)
+
+ if err != nil {
+ return err
+ }
+
+ scope.setVariable(token.content, value)
+
+ case tokenVariableDeclaration:
+
+ value, err := scope.interpolateLiteral(tokens[1].content)
+
+ if err != nil {
+ return err
+ }
+
+ scope.setArgumentVariable(token.content, value)
+
+ case tokenConditionalVariableAssignment:
+
+ value, err := scope.interpolateLiteral(tokens[1].content)
+
+ if err != nil {
+ return err
+ }
+
+ if v := scope.variable(token.content); v == "" {
+ scope.setArgumentVariable(token.content, value)
+ }
+
+ case tokenMixinDeclaration:
+ if err := p.parseMixinDeclaration(branch, tokens, scope); err != nil {
+ return err
+ }
+
+ case tokenFunctionCall:
+ if err := p.parseFunctionCall(branch, tkn, tokens, scope.clone()); err != nil {
+ return err
+ }
+
+ case tokenCommentStart, tokenCommentEnd, tokenLineComment:
+ // Do nothing
+
+ default:
+ return p.err(branch, "Unexpected token: %s (%s)", token.kind, branch.content)
+ }
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) parseTreeForDocumentation(tree scannerTree, tkn *tokeniser, docs *MixinDocs) error {
+
+ comments := []string{}
+
+ resetComments := func() {
+ comments = []string{}
+ }
+
+ for _, branch := range tree {
+
+ tokens, err := tkn.tokenise(branch)
+
+ if err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ if len(tokens) > 0 {
+
+ token := tokens[0]
+
+ switch token.kind {
+ case tokenLineComment, tokenCommentEnd:
+ // Do nothing
+
+ case tokenCommentStart:
+ p.parseBlockComment(branch.children, &comments, branch.line, 0)
+
+ case tokenMixinDeclaration:
+
+ if token.content[0] == '_' {
+ resetComments()
+ continue
+ }
+
+ doc := MixinDoc{
+ Name: token.content,
+ File: branch.file,
+ Line: branch.line,
+ Reference: branch.String(),
+ Signature: string(branch.content),
+ Docs: strings.Join(comments, "\n"),
+ }
+
+ // Clear comments
+ resetComments()
+
+ // Store the mixin docs and empty the running comment
+ if err := p.parseTreeForDocumentation(branch.children, tkn, &doc.Children); err != nil {
+ return err
+ }
+
+ *docs = append(*docs, doc)
+
+ default:
+ resetComments()
+ if err := p.parseTreeForDocumentation(branch.children, tkn, docs); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) parseBlockComment(tree scannerTree, comments *[]string, line, indentation int) error {
+
+ for _, branch := range tree {
+
+ // Re-add missing blank lines
+ if line == 0 {
+ line = branch.line
+ } else {
+ if line != branch.line-1 {
+ *comments = append(*comments, "")
+ }
+ line = branch.line
+ }
+
+ *comments = append(*comments, strings.Repeat(" ", indentation*4)+string(branch.content))
+
+ if err := p.parseBlockComment(branch.children, comments, line, indentation+1); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) parseLiteral(branch *scannerLine, tkn *tokeniser, token token, scope *scope) error {
+
+ children := len(branch.children) > 0
+
+ if err := p.writeLiteralToOutput(scope, token.content, children); err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ if children {
+
+ if err := p.parseTree(branch.children, tkn, scope.clone()); err != nil {
+ return err
+ }
+
+ p.endBlock()
+ }
+
+ return nil
+}
+
+func (p *parser) parseMixinDeclaration(branch *scannerLine, tokens []token, scope *scope) error {
+
+ i := 0
+ literalExpected := false
+ optionalArgStart := false
+
+ var (
+ arguments []token
+ defaults []string
+ current token
+ )
+
+ // Make sure that only variables are given as arguments
+ for _, v := range tokens[1:] {
+
+ switch v.kind {
+
+ case tokenLiteral:
+ if !literalExpected {
+ return p.err(branch, "Argument declaration %d [%s]: Unexpected literal", i, v.content)
+ }
+
+ value := v.content
+
+ // Underscore literals are 'no values' in mixin
+ // declarations
+ if value == noMixinParamValue {
+ value = ""
+ }
+
+ arguments = append(arguments, current)
+ defaults = append(defaults, value)
+ literalExpected = false
+
+ case tokenVariableAssignment:
+ optionalArgStart = true
+ literalExpected = true
+ current = token{
+ kind: tokenVariable,
+ content: v.content,
+ line: v.line,
+ }
+ i++
+
+ case tokenVariable:
+
+ if optionalArgStart {
+ return p.err(branch, "Argument declaration %d [%s]: A required argument can't follow an optional argument", i, v.content)
+ }
+
+ arguments = append(arguments, v)
+ defaults = append(defaults, "")
+ i++
+
+ default:
+ return p.err(branch, "Argument declaration %d [%s] is not a variable or a variable assignment", i, v.content)
+ }
+ }
+
+ if literalExpected {
+ return p.err(branch, "Expected a literal in mixin signature")
+ }
+
+ if a, d := len(arguments), len(defaults); a != d {
+ return p.err(branch, "Expected equal numbers of arguments and defaults (a:%d,d:%d)", a, d)
+ }
+
+ scope.setMixin(tokens[0].content, branch, arguments, defaults)
+
+ return nil
+}
+
+func (p *parser) parseFunctionCall(branch *scannerLine, tkn *tokeniser, tokens []token, scope *scope) error {
+
+ // Handle built-ins
+ if tokens[0].content == builtinMixinBody {
+ return p.parseBodyCall(branch, tkn, scope)
+ } else if tokens[0].content == builtinMixinInclude {
+ return p.parseIncludeCall(branch, tokens, scope)
+ }
+
+ // Make sure the mixin exists in the scope
+ mx, err := scope.mixin(tokens[0].content)
+
+ if err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ args, err := p.extractValuesFromArgTokens(branch, tokens[1:], scope)
+
+ if err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ // Add in the defaults
+ if l := len(args); l < len(mx.defaults) {
+ args = append(args, mx.defaults[l:]...)
+ }
+
+ // Check the argument counts
+ if r, g := len(mx.arguments), len(args); r != g {
+ return p.err(branch, "Wrong number of arguments for %s (required %d, got %d)", tokens[0].content, r, g)
+ }
+
+ // Set the argument values
+ for i := 0; i < len(mx.arguments); i++ {
+ scope.setArgumentVariable(mx.arguments[i].name, args[i])
+ }
+
+ // Set an anchor branch for the __body__ built-in
+ scope.branch = branch
+ scope.branchScope = scope.parent
+
+ // Call the function!
+ return p.parseTree(mx.declaration.children, tkn, scope)
+}
+
+func (p *parser) parseBodyCall(branch *scannerLine, tkn *tokeniser, scope *scope) error {
+
+ if scope.branchScope == nil {
+ return p.err(branch, "Unexpected error: No parent scope somehow!")
+ }
+
+ if scope.branch == nil {
+ return p.err(branch, "Unexpected error: No anchor branch!")
+ }
+
+ s := scope.branchScope.clone()
+ s.mixins = scope.mixins
+ s.variables = scope.variables // FIXME Merge?
+
+ return p.parseTree(scope.branch.children, tkn, s)
+}
+
+func (p *parser) includeGlob(name string, branch *scannerLine) error {
+
+ name = strings.TrimSuffix(strings.Trim(name, `"'`), ".scl") + ".scl"
+
+ vendorPath := []string{filepath.Join(filepath.Dir(branch.file), "vendor")}
+ vendorPath = append(vendorPath, p.includePaths...)
+
+ var paths []string
+
+ for _, ip := range vendorPath {
+
+ ipaths, err := p.fs.Glob(ip + "/" + name)
+
+ if err != nil {
+ return err
+ }
+
+ if len(ipaths) > 0 {
+ paths = ipaths
+ break
+ }
+ }
+
+ if len(paths) == 0 {
+
+ var err error
+ paths, err = p.fs.Glob(name)
+
+ if err != nil {
+ return err
+ }
+ }
+
+ if len(paths) == 0 {
+ return fmt.Errorf("Can't read %s: no files found", name)
+ }
+
+ for _, path := range paths {
+ if err := p.Parse(path); err != nil {
+ return fmt.Errorf(err.Error())
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) parseIncludeCall(branch *scannerLine, tokens []token, scope *scope) error {
+
+ args, err := p.extractValuesFromArgTokens(branch, tokens[1:], scope)
+
+ if err != nil {
+ return p.err(branch, err.Error())
+ }
+
+ for _, v := range args {
+
+ if err := p.includeGlob(v, branch); err != nil {
+ return p.err(branch, err.Error())
+ }
+ }
+
+ return nil
+}
+
+func (p *parser) extractValuesFromArgTokens(branch *scannerLine, tokens []token, scope *scope) ([]string, error) {
+
+ var args []string
+
+ for _, v := range tokens {
+ switch v.kind {
+
+ case tokenLiteral:
+
+ value, err := scope.interpolateLiteral(v.content)
+
+ if err != nil {
+ return args, err
+ }
+
+ args = append(args, value)
+
+ case tokenVariable:
+
+ value := scope.variable(v.content)
+
+ if value == "" {
+ return args, fmt.Errorf("Variable $%s is not declared in this scope", v.content)
+ }
+
+ args = append(args, value)
+
+ default:
+ return args, fmt.Errorf("Invalid token type for function argument: %s (%s)", v.kind, branch.content)
+ }
+ }
+
+ return args, nil
+}
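A minimal usage sketch for the Parser added above; the include path, parameter name and file name are hypothetical:

``` go
package main

import (
	"fmt"

	"github.com/homemade/scl"
)

func main() {
	// NewDiskSystem with no argument uses the current working directory as
	// the base path.
	parser, err := scl.NewParser(scl.NewDiskSystem())
	if err != nil {
		panic(err)
	}

	// Include paths and parser-level variables are global to every file
	// parsed by this parser.
	parser.AddIncludePath("/usr/local/share/scl-lib") // hypothetical path
	parser.SetParam("environment", "production")      // available to SCL as $environment

	if err := parser.Parse("config.scl"); err != nil { // hypothetical file
		panic(err)
	}

	// The accumulated HCL output for everything parsed so far.
	fmt.Println(parser.String())
}
```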
diff --git a/vendor/github.com/homemade/scl/readme.markdown b/vendor/github.com/homemade/scl/readme.markdown
new file mode 100644
index 0000000..3c0ddc9
--- /dev/null
+++ b/vendor/github.com/homemade/scl/readme.markdown
@@ -0,0 +1,105 @@
+[![Build Status](https://travis-ci.org/homemade/scl.svg?branch=master)](https://travis-ci.org/homemade/scl) [![Coverage Status](https://coveralls.io/repos/github/homemade/scl/badge.svg?branch=master)](https://coveralls.io/github/homemade/scl?branch=master) [![GoDoc](https://godoc.org/github.com/homemade/scl?status.svg)](https://godoc.org/github.com/homemade/scl) [![Language reference](https://img.shields.io/badge/language-reference-736caf.svg)](https://github.com/homemade/scl/wiki)
+
+## Sepia Configuration Language
+
+The Sepia Configuration Language is a simple, declarative, semi-functional, self-documenting language that extends HashiCorp's [HCL](https://github.com/hashicorp/hcl) in the same sort of way that Sass extends CSS. The syntax of SCL is concise, intuitive and flexible. Critically, it also validates much of your configuration by design, so it's harder to configure an application that seems like it should work &mdash; but doesn't.
+
+SCL transpiles to HCL and, like CSS and Sass, any [properly formatted](https://github.com/fatih/hclfmt) HCL is valid SCL. If you have an existing HCL setup, you can transplant it to SCL directly and then start making use of the code organisation, mixins, and properly scoped variables that SCL offers.
+
+In addition to the language itself, there is a useful [command-line tool](https://github.com/homemade/scl/tree/master/cmd/scl) that can compile your .scl files and write the output to the terminal, run gold standard tests against your code, and even fetch libraries of code from public version control systems.
+
+This readme is concerned with the technical implementation of the Go package and the CLI tool. For a full language specification complete with examples and diagrams, see the [wiki](https://github.com/homemade/scl/wiki).
+
+## Installation
+
+Assuming you have Go installed, the package and CLI tool can be fetched in the usual way:
+
+```
+$ go get -u github.com/homemade/scl/...
+```
+
+## Contributions
+
+This is fairly new software that has been tested intensively over a fairly narrow range of functions. Minor bugs are expected! If you have any suggestions or feature requests [please open an issue](https://github.com/homemade/scl/issues/new). Pull requests for bug fixes or uncontroversial improvements are appreciated.
+
+We're currently working on standard libraries for Terraform and Hugo. If you build an SCL library for anything else, please let us know!
+
+## Using SCL in your application
+
+SCL is built on top of HCL, and the fundamental procedure for using it is more or less the same: SCL code is decoded into a Go struct, informed by `hcl` tags on the struct's fields. A trivially simple example is as follows:
+
+``` go
+myConfigObject := struct {
+ SomeVariable int `hcl:"some_variable"`
+}{}
+
+if err := scl.DecodeFile(&myConfigObject, "/path/to/a/config/file.scl"); err != nil {
+ // handle error
+}
+
+// myConfigObject is now populated!
+```
+
+There are many more options&mdash;like include paths, predefined variables and documentation generation&mdash;available in the [API](https://godoc.org/github.com/homemade/scl). If you have an existing HCL setup in your application, you can easily swap out your HCL loading function for an SCL loading function to try it out!
+
+## CLI tool
+
+The tool, which is installed with the package, is named `scl`. With it, you can transpile .scl files to stdout, run gold standard tests that compare .scl files to .hcl files, and fetch external libraries from version control.
+
+### Usage
+
+Run `scl` for a command syntax.
+
+### Examples
+
+Basic example:
+```
+$ scl run $GOPATH/src/bitbucket.org/homemade/scl/fixtures/valid/basic.scl
+/* .../bitbucket.org/homemade/scl/fixtures/valid/basic.scl */
+wrapper {
+ inner = "yes"
+ another = "1" {
+ yet_another = "123"
+ }
+}
+```
+
+Adding includes:
+```
+$ scl run -include $GOPATH/src/bitbucket.org/homemade/scl $GOPATH/src/bitbucket.org/homemade/scl/fixtures/valid/import.scl
+/* .../bitbucket.org/homemade/scl/fixtures/valid/import.scl */
+wrapper {
+ inner = "yes"
+ another = "1" {
+ yet_another = "123"
+ }
+}
+output = "this is from simpleMixin"
+```
+
+Adding params via cli flags:
+```
+$ scl run -param myVar=1 $GOPATH/src/bitbucket.org/homemade/scl/fixtures/valid/variables.scl
+/* .../bitbucket.org/homemade/scl/fixtures/valid/variables.scl */
+outer {
+ inner = 1
+}
+```
+
+Adding params via environmental variables:
+```
+$ myVar=1 scl run $GOPATH/src/bitbucket.org/homemade/scl/fixtures/valid/variables.scl
+/* .../bitbucket.org/homemade/scl/fixtures/valid/variables.scl */
+outer {
+ inner = 1
+}
+```
+
+Skipping environmental variable slurping:
+```
+$ myVar=1 scl run -no-env -param myVar=2 $GOPATH/src/bitbucket.org/homemade/scl/fixtures/valid/variables.scl
+/* .../src/bitbucket.org/homemade/scl/fixtures/valid/variables.scl */
+outer {
+ inner = 2
+}
+```
diff --git a/vendor/github.com/homemade/scl/scanner.go b/vendor/github.com/homemade/scl/scanner.go
new file mode 100644
index 0000000..7dddf59
--- /dev/null
+++ b/vendor/github.com/homemade/scl/scanner.go
@@ -0,0 +1,121 @@
+package scl
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strings"
+)
+
+type scannerTree []*scannerLine
+
+type scanner struct {
+ file string
+ reader io.Reader
+ lines scannerTree
+}
+
+func newScanner(reader io.Reader, filename ...string) *scanner {
+
+ file := "<no file>"
+
+ if len(filename) > 0 {
+ file = filename[0]
+ }
+
+ s := scanner{
+ file: file,
+ reader: reader,
+ lines: make(scannerTree, 0),
+ }
+
+ return &s
+}
+
+func (s *scanner) scan() (lines scannerTree, err error) {
+
+ // Split to lines
+ scanner := bufio.NewScanner(s.reader)
+ scanner.Split(bufio.ScanLines)
+
+ lineNumber := 0
+ rawLines := make(scannerTree, 0)
+
+ heredoc := ""
+ heredocContent := ""
+ heredocLine := 0
+
+ for scanner.Scan() {
+ lineNumber++
+
+ if heredoc != "" {
+ heredocContent += "\n" + scanner.Text()
+
+ if strings.TrimSpace(scanner.Text()) == heredoc {
+ // HCL requires heredocs to be terminated with a newline
+ rawLines = append(rawLines, newLine(s.file, lineNumber, 0, heredocContent+"\n"))
+ heredoc = ""
+ heredocContent = ""
+ }
+
+ continue
+ }
+
+ text := strings.TrimRight(scanner.Text(), " \t{}")
+
+ if text == "" {
+ continue
+ }
+
+ if matches := heredocMatcher.FindAllStringSubmatch(text, -1); matches != nil {
+ heredoc = matches[0][1]
+ heredocContent = text
+ heredocLine = lineNumber
+ continue
+ }
+
+ rawLines = append(rawLines, newLine(s.file, lineNumber, 0, text))
+ }
+
+ if heredoc != "" {
+ return lines, fmt.Errorf("Heredoc '%s' (started line %d) not terminated", heredoc, heredocLine)
+ }
+
+ // Make sure the first line has no indent
+ if len(rawLines) > 0 {
+ index := 0
+ s.indentLines(&index, rawLines, &lines, rawLines[0].content.indent())
+ }
+
+ return
+}
+
+func (s *scanner) indentLines(index *int, input scannerTree, output *scannerTree, indent int) {
+
+ // Ends when there are no more lines
+ if *index >= len(input) {
+ return
+ }
+
+ var lineToAdd *scannerLine
+
+ for ; *index < len(input); *index++ {
+
+ lineIndent := input[*index].content.indent()
+
+ if lineIndent == indent {
+ lineToAdd = input[*index].branch()
+ *output = append(*output, lineToAdd)
+
+ } else if lineIndent > indent {
+ s.indentLines(index, input, &lineToAdd.children, lineIndent)
+
+ } else if lineIndent < indent {
+ *index--
+ return
+ }
+
+ }
+
+ return
+}
diff --git a/vendor/github.com/homemade/scl/scanner_line.go b/vendor/github.com/homemade/scl/scanner_line.go
new file mode 100644
index 0000000..972c766
--- /dev/null
+++ b/vendor/github.com/homemade/scl/scanner_line.go
@@ -0,0 +1,38 @@
+package scl
+
+import (
+ "fmt"
+ "strings"
+)
+
+type lineContent string
+
+func (s lineContent) indent() int {
+ return len(s) - len(strings.TrimLeft(string(s), " \t"))
+}
+
+type scannerLine struct {
+ file string
+ line int
+ column int
+ content lineContent
+ children scannerTree
+}
+
+func newLine(fileName string, lineNumber, column int, content string) *scannerLine {
+ return &scannerLine{
+ file: fileName,
+ line: lineNumber,
+ column: column,
+ content: lineContent(content),
+ children: make(scannerTree, 0),
+ }
+}
+
+func (l *scannerLine) branch() *scannerLine {
+ return newLine(l.file, l.line, l.content.indent(), strings.Trim(string(l.content), " \t"))
+}
+
+func (l *scannerLine) String() string {
+ return fmt.Sprintf("%s:%d", l.file, l.line)
+}
diff --git a/vendor/github.com/homemade/scl/scope.go b/vendor/github.com/homemade/scl/scope.go
new file mode 100755
index 0000000..e8105ae
--- /dev/null
+++ b/vendor/github.com/homemade/scl/scope.go
@@ -0,0 +1,277 @@
+package scl
+
+import (
+ "fmt"
+ "unicode"
+)
+
+type variable struct {
+ name string
+ value string
+}
+
+type mixin struct {
+ declaration *scannerLine
+ arguments []variable
+ defaults []string
+}
+
+type scope struct {
+ parent *scope
+ branch *scannerLine
+ branchScope *scope
+ variables map[string]*variable
+ mixins map[string]*mixin
+}
+
+func newScope() *scope {
+ return &scope{
+ variables: make(map[string]*variable),
+ mixins: make(map[string]*mixin),
+ }
+}
+
+func (s *scope) setArgumentVariable(name, value string) {
+ s.variables[name] = &variable{name, value}
+}
+
+func (s *scope) setVariable(name, value string) {
+
+ v, ok := s.variables[name]
+
+ if !ok || v == nil {
+ s.variables[name] = &variable{name, value}
+ } else {
+ s.variables[name].value = value
+ }
+}
+
+func (s *scope) variable(name string) string {
+
+ value, ok := s.variables[name]
+
+ if !ok || value == nil {
+ return ""
+ }
+
+ return s.variables[name].value
+}
+
+func (s *scope) setMixin(name string, declaration *scannerLine, argumentTokens []token, defaults []string) {
+
+ mixin := &mixin{
+ declaration: declaration,
+ defaults: defaults,
+ }
+
+ for _, t := range argumentTokens {
+ mixin.arguments = append(mixin.arguments, variable{name: t.content})
+ }
+
+ s.mixins[name] = mixin
+}
+
+func (s *scope) removeMixin(name string) {
+ delete(s.mixins, name)
+}
+
+func (s *scope) mixin(name string) (*mixin, error) {
+
+ m, ok := s.mixins[name]
+
+ if !ok {
+ return nil, fmt.Errorf("Mixin %s not declared in this scope", name)
+ }
+
+ return m, nil
+}
+
+func (s *scope) interpolateLiteral(literal string) (outp string, err error) {
+
+ isVariableChar := func(c rune) bool {
+ return unicode.IsLetter(c) || unicode.IsDigit(c) || c == '_'
+ }
+
+ unknownVariable := func(name []byte) {
+ err = fmt.Errorf("Unknown variable '$%s'", name)
+ }
+
+ unfinishedVariable := func(name []byte) {
+ err = fmt.Errorf("Expecting closing right brace in variable ${%s}", name)
+ }
+
+ result := func() (result []byte) {
+
+ var (
+ backSlash = '\\'
+ dollar = '$'
+ leftBrace = '{'
+ rightBrace = '}'
+ backtick = '`'
+ slashEscaped = false
+
+ variableStarted = false
+ variableIsBraceEscaped = false
+ variable = []byte{}
+ literalStarted = false
+ )
+
+ for _, c := range []byte(literal) {
+
+ if literalStarted {
+
+ if rune(c) == backtick {
+ literalStarted = false
+ continue
+ }
+
+ result = append(result, c)
+ continue
+ }
+
+ if variableStarted {
+
+ if len(variable) == 0 {
+
+ // If the first character is a dollar, then this
+ // is a $$var escape
+ if rune(c) == dollar {
+ variableStarted = false
+ variableIsBraceEscaped = false
+
+ // Write out two dollars – one for the skipped var
+ // signifier, and the current one
+ result = append(result, byte(dollar))
+ continue
+ }
+
+ // If the first character is a curl brace,
+ // it's the start of a ${var} syntax
+ if !variableIsBraceEscaped {
+ if rune(c) == leftBrace {
+ variableIsBraceEscaped = true
+ continue
+ } else {
+ variableIsBraceEscaped = false
+ }
+ }
+ }
+
+ // If this is a valid variable character,
+ // add it to the variable building
+ if isVariableChar(rune(c)) {
+ variable = append(variable, c)
+ continue
+ }
+
+ // If the variable is zero length, then it's a dollar literal
+ if len(variable) == 0 {
+ variableStarted = false
+ variableIsBraceEscaped = false
+ result = append(result, byte(dollar), c)
+ continue
+ }
+
+ // Brace-escaped variables must end with a closing brace
+ if variableIsBraceEscaped {
+ if rune(c) != rightBrace {
+ unfinishedVariable(variable)
+ return
+ }
+ }
+
+ writeOutput := !variableIsBraceEscaped
+
+ // The variable has ended
+ variableStarted = false
+ variableIsBraceEscaped = false
+
+ // The variable is complete; look up its value
+ if replacement := s.variable(string(variable)); replacement != "" {
+ result = append(result, []byte(replacement)...)
+
+ if writeOutput {
+ result = append(result, c)
+ }
+
+ continue
+ }
+
+ unknownVariable(variable)
+ return
+ }
+
+ if slashEscaped {
+ result = append(result, c)
+ slashEscaped = false
+ continue
+ }
+
+ switch rune(c) {
+ case backSlash:
+ slashEscaped = true
+ continue
+
+ case dollar:
+ variableStarted, variable = true, []byte{}
+ continue
+
+ case backtick:
+ literalStarted = true
+ continue
+ }
+
+ result = append(result, c)
+
+ slashEscaped = false
+ }
+
+ if literalStarted {
+ err = fmt.Errorf("Unterminated backtick literal")
+ return
+ }
+
+ // If the last character is a slash, add it
+ if slashEscaped {
+ result = append(result, byte(backSlash))
+ }
+
+ // The string ended mid-variable, so add it if possible
+ if variableStarted {
+
+ if variableIsBraceEscaped {
+ unfinishedVariable(variable)
+ return
+ } else if replacement := s.variable(string(variable)); replacement != "" {
+ result = append(result, []byte(replacement)...)
+ } else {
+ unknownVariable(variable)
+ return
+ }
+ }
+
+ return
+ }()
+
+ outp = string(result)
+
+ return
+}
+
+func (s *scope) clone() *scope {
+
+ s2 := newScope()
+ s2.parent = s
+ s2.branch = s.branch
+ s2.branchScope = s.branchScope
+
+ for k, v := range s.variables {
+ s2.variables[k] = v
+ }
+
+ for k, v := range s.mixins {
+ s2.mixins[k] = v
+ }
+
+ return s2
+}
diff --git a/vendor/github.com/homemade/scl/token.go b/vendor/github.com/homemade/scl/token.go
new file mode 100644
index 0000000..77d5ab2
--- /dev/null
+++ b/vendor/github.com/homemade/scl/token.go
@@ -0,0 +1,39 @@
+package scl
+
+//go:generate stringer -type=tokenKind -output=token_string.go
+type tokenKind int
+
+const (
+ tokenLineComment tokenKind = iota
+ tokenMixinDeclaration
+ tokenVariable
+ tokenVariableAssignment
+ tokenFunctionCall
+ tokenLiteral
+ tokenVariableDeclaration
+ tokenConditionalVariableAssignment
+ tokenCommentStart
+ tokenCommentEnd
+)
+
+var tokenKindsByString = map[tokenKind]string{
+ tokenLineComment: "line comment",
+ tokenMixinDeclaration: "mixin declaration",
+ tokenVariableAssignment: "variable assignment",
+ tokenVariableDeclaration: "variable declaration",
+ tokenConditionalVariableAssignment: "conditional variable declaration",
+ tokenFunctionCall: "function call",
+ tokenLiteral: "literal",
+ tokenCommentStart: "comment start",
+ tokenCommentEnd: "comment end",
+}
+
+type token struct {
+ kind tokenKind
+ content string
+ line *scannerLine
+}
+
+func (t token) String() string {
+ return tokenKindsByString[t.kind]
+}
diff --git a/vendor/github.com/homemade/scl/token_string.go b/vendor/github.com/homemade/scl/token_string.go
new file mode 100644
index 0000000..c959de6
--- /dev/null
+++ b/vendor/github.com/homemade/scl/token_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type=tokenKind -output=token_string.go"; DO NOT EDIT
+
+package scl
+
+import "fmt"
+
+const _tokenKind_name = "tokenLineCommenttokenMixinDeclarationtokenVariabletokenVariableAssignmenttokenFunctionCalltokenLiteraltokenVariableDeclarationtokenConditionalVariableAssignmenttokenCommentStarttokenCommentEnd"
+
+var _tokenKind_index = [...]uint8{0, 16, 37, 50, 73, 90, 102, 126, 160, 177, 192}
+
+func (i tokenKind) String() string {
+ if i < 0 || i >= tokenKind(len(_tokenKind_index)-1) {
+ return fmt.Sprintf("tokenKind(%d)", i)
+ }
+ return _tokenKind_name[_tokenKind_index[i]:_tokenKind_index[i+1]]
+}
diff --git a/vendor/github.com/homemade/scl/tokeniser.go b/vendor/github.com/homemade/scl/tokeniser.go
new file mode 100644
index 0000000..684c7dd
--- /dev/null
+++ b/vendor/github.com/homemade/scl/tokeniser.go
@@ -0,0 +1,279 @@
+package scl
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+ "unicode"
+)
+
+var hashCommentMatcher = regexp.MustCompile(`#.+$`)
+var functionMatcher = regexp.MustCompile(`^([a-zA-Z0-9_]+)\s?\((.*)\):?$`)
+var shortFunctionMatcher = regexp.MustCompile(`^([a-zA-Z0-9_]+):$`)
+var variableMatcher = regexp.MustCompile(`^\$([a-zA-Z_][a-zA-Z0-9_]*)$`)
+var assignmentMatcher = regexp.MustCompile(`^\$([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*((.|\n)+)$`)
+var declarationMatcher = regexp.MustCompile(`^\$([a-zA-Z_][a-zA-Z0-9_]*)\s*:=\s*(.+)$`)
+var conditionalVariableMatcher = regexp.MustCompile(`^\$([a-zA-Z_0-9]+)\s*\?=\s*(.+)$`)
+var docblockStartMatcher = regexp.MustCompile(`^/\*$`)
+var docblockEndMatcher = regexp.MustCompile(`^\*\/$`)
+var heredocMatcher = regexp.MustCompile(`<<([a-zA-Z]+)\s*$`)
+
+type tokeniser struct {
+ accruedComment []string
+}
+
+func newTokeniser() *tokeniser {
+ return &tokeniser{}
+}
+
+func (t *tokeniser) resetComment() {
+ t.accruedComment = make([]string, 0)
+}
+
+func (t *tokeniser) stripComments(l *scannerLine) string {
+
+ lastQuote := rune(0)
+ slash := rune(47)
+ slashCount := 0
+
+ result := func() (result []byte) {
+
+ for i, v := range []byte(l.content) {
+
+ c := rune(v)
+
+ switch {
+ case c == lastQuote:
+ lastQuote = rune(0)
+ slashCount = 0
+
+ case unicode.In(c, unicode.Quotation_Mark):
+ lastQuote = c
+ slashCount = 0
+
+ case c == slash && lastQuote == rune(0):
+
+ slashCount++
+ if slashCount == 2 {
+ return result[0:(i - 1)]
+ }
+
+ default:
+ slashCount = 0
+ }
+
+ result = append(result, v)
+ }
+
+ return
+ }()
+
+ return strings.Trim(string(result), " ")
+}
+
+func (t *tokeniser) tokenise(l *scannerLine) (tokens []token, err error) {
+
+ // Remove comments
+ content := t.stripComments(l)
+
+ // If the string is empty, the entire line was a comment
+ if content == "" {
+ return []token{
+ token{
+ kind: tokenLineComment,
+ content: strings.TrimLeft(string(l.content), "/ "),
+ line: l,
+ },
+ }, nil
+ }
+
+ if docblockStartMatcher.MatchString(content) {
+ return t.tokeniseCommentStart(l, lineContent(content))
+ }
+
+ if docblockEndMatcher.MatchString(content) {
+ return t.tokeniseCommentEnd(l, lineContent(content))
+ }
+
+ // Mixin declarations start with a @
+ if content[0] == '@' {
+ return t.tokeniseMixinDeclaration(l, lineContent(content))
+ }
+
+ if shortFunctionMatcher.MatchString(content) {
+ return t.tokeniseShortFunctionCall(l, lineContent(content))
+ }
+
+ if functionMatcher.MatchString(content) {
+ return t.tokeniseFunctionCall(l, lineContent(content))
+ }
+
+ if assignmentMatcher.MatchString(content) {
+ return t.tokeniseVariableAssignment(l, lineContent(content))
+ }
+
+ if declarationMatcher.MatchString(content) {
+ return t.tokeniseVariableDeclaration(l, lineContent(content))
+ }
+
+ if conditionalVariableMatcher.MatchString(content) {
+ return t.tokeniseConditionalVariableAssignment(l, lineContent(content))
+ }
+
+ // Assume the result is a literal
+ return []token{
+ token{kind: tokenLiteral, content: content, line: l},
+ }, nil
+}
+
+func (t *tokeniser) tokeniseCommentStart(l *scannerLine, content lineContent) (tokens []token, err error) {
+ tokens = append(tokens, token{kind: tokenCommentStart, line: l})
+ return
+}
+
+func (t *tokeniser) tokeniseCommentEnd(l *scannerLine, content lineContent) (tokens []token, err error) {
+ tokens = append(tokens, token{kind: tokenCommentEnd, line: l})
+ return
+}
+
+func (t *tokeniser) tokeniseFunction(l *scannerLine, input string) (name string, tokens []token, err error) {
+
+ parts := functionMatcher.FindStringSubmatch(input)
+
+ if len(parts) < 2 {
+ return "", tokens, fmt.Errorf("Can't parse function signature")
+ }
+
+ name = parts[1]
+
+ if len(parts) == 3 && parts[2] != "" {
+
+ lastQuote := rune(0)
+ comma := rune(0x2c)
+ leftBracket := rune(0x5b)
+ rightBracket := rune(0x5d)
+
+ f := func(c rune) bool {
+
+ switch {
+ case c == lastQuote:
+ lastQuote = rune(0)
+ return false
+ case lastQuote != rune(0):
+ return false
+ case unicode.In(c, unicode.Quotation_Mark):
+ lastQuote = c
+ return false
+ case c == leftBracket:
+ lastQuote = rightBracket
+ return false
+ case c == comma:
+ return true
+ default:
+ return false
+
+ }
+ }
+
+ arguments := strings.FieldsFunc(parts[2], f)
+
+ for _, arg := range arguments {
+
+ arg = strings.Trim(arg, " \t")
+
+ if matches := variableMatcher.FindStringSubmatch(arg); len(matches) > 1 {
+ tokens = append(tokens, token{kind: tokenVariable, content: matches[1], line: l})
+ } else if matches := assignmentMatcher.FindStringSubmatch(arg); len(matches) > 1 {
+ tokens = append(tokens, token{kind: tokenVariableAssignment, content: matches[1], line: l})
+ tokens = append(tokens, token{kind: tokenLiteral, content: matches[2], line: l})
+ } else {
+ tokens = append(tokens, token{kind: tokenLiteral, content: arg, line: l})
+ }
+ }
+ }
+
+ return
+}
+
+func (t *tokeniser) tokeniseMixinDeclaration(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ name, fntokens, fnerr := t.tokeniseFunction(l, string(content)[1:])
+
+ if fnerr != nil {
+ return tokens, fmt.Errorf("%s: %s", l, fnerr)
+ }
+
+ tokens = append(tokens, token{kind: tokenMixinDeclaration, content: name, line: l})
+ tokens = append(tokens, fntokens...)
+
+ return
+}
+
+func (t *tokeniser) tokeniseFunctionCall(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ name, fntokens, fnerr := t.tokeniseFunction(l, string(content))
+
+ if fnerr != nil {
+ return tokens, fmt.Errorf("%s: %s", l, fnerr)
+ }
+
+ tokens = append(tokens, token{kind: tokenFunctionCall, content: name, line: l})
+ tokens = append(tokens, fntokens...)
+
+ return
+}
+
+func (t *tokeniser) tokeniseShortFunctionCall(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ parts := shortFunctionMatcher.FindStringSubmatch(string(content))
+
+ if len(parts) > 0 {
+ return []token{
+ token{kind: tokenFunctionCall, content: parts[1], line: l},
+ }, nil
+ }
+
+ return tokens, fmt.Errorf("Failed to parse short function call")
+}
+
+func (t *tokeniser) tokeniseVariableAssignment(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ parts := assignmentMatcher.FindStringSubmatch(string(content))
+
+ if len(parts) > 0 {
+ return []token{
+ token{kind: tokenVariableAssignment, content: parts[1], line: l},
+ token{kind: tokenLiteral, content: parts[2], line: l},
+ }, nil
+ }
+
+ return tokens, fmt.Errorf("Failed to parse variable assignment")
+}
+
+func (t *tokeniser) tokeniseVariableDeclaration(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ parts := declarationMatcher.FindStringSubmatch(string(content))
+
+ if len(parts) > 0 {
+ return []token{
+ token{kind: tokenVariableDeclaration, content: parts[1], line: l},
+ token{kind: tokenLiteral, content: parts[2], line: l},
+ }, nil
+ }
+
+ return tokens, fmt.Errorf("Failed to parse variable declaration")
+}
+
+func (t *tokeniser) tokeniseConditionalVariableAssignment(l *scannerLine, content lineContent) (tokens []token, err error) {
+
+ parts := conditionalVariableMatcher.FindStringSubmatch(string(content))
+
+ if len(parts) > 0 {
+ return []token{
+ token{kind: tokenConditionalVariableAssignment, content: parts[1], line: l},
+ token{kind: tokenLiteral, content: parts[2], line: l},
+ }, nil
+ }
+
+ return tokens, fmt.Errorf("Failed to parse conditional variable assignment")
+}
diff --git a/vendor/vendor.json b/vendor/vendor.json
index 84c528b..85f9bdd 100644
--- a/vendor/vendor.json
+++ b/vendor/vendor.json
@@ -293,56 +293,56 @@
{
"checksumSHA1": "Ok3Csn6Voou7pQT6Dv2mkwpqFtw=",
"path": "github.com/hashicorp/hcl",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "XQmjDva9JCGGkIecOgwtBEMCJhU=",
"path": "github.com/hashicorp/hcl/hcl/ast",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
- "checksumSHA1": "vF6LLywGDoAaccTcAGrcY7mYvZc=",
+ "checksumSHA1": "MPz4qnNmoYHHUXDhHj0TpJk4LHk=",
"path": "github.com/hashicorp/hcl/hcl/parser",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "z6wdP4mRw4GVjShkNHDaOWkbxS0=",
"path": "github.com/hashicorp/hcl/hcl/scanner",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "oS3SCN9Wd6D8/LG0Yx1fu84a7gI=",
"path": "github.com/hashicorp/hcl/hcl/strconv",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "c6yprzj06ASwCo18TtbbNNBHljA=",
"path": "github.com/hashicorp/hcl/hcl/token",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "138aCV5n8n7tkGYMsMVQQnnLq+0=",
"path": "github.com/hashicorp/hcl/json/parser",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "YdvFsNOMSWMLnY6fcliWQa0O5Fw=",
"path": "github.com/hashicorp/hcl/json/scanner",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "fNlXQCQEnb+B3k5UDL/r15xtSJY=",
"path": "github.com/hashicorp/hcl/json/token",
- "revision": "80e628d796135357b3d2e33a985c666b9f35eee1",
- "revisionTime": "2016-12-15T22:58:39Z"
+ "revision": "39fa3a62ba92cf550eb0f9cfb84757ef79b8aa30",
+ "revisionTime": "2017-01-20T01:07:30Z"
},
{
"checksumSHA1": "31yBeS6U3xm7VJ7ZvDxRgBxXP0A=",
@@ -363,6 +363,12 @@
"revisionTime": "2016-12-28T17:41:50Z"
},
{
+ "checksumSHA1": "1kyuVsFZJgHR7KZ2inXSo2RMzsk=",
+ "path": "github.com/homemade/scl",
+ "revision": "77bb4d7a439e9839b4d5eb285a5b40bb90d699fa",
+ "revisionTime": "2016-12-21T12:54:34Z"
+ },
+ {
"checksumSHA1": "0ZrwvB6KoGPj2PoDNSEJwxQ6Mog=",
"path": "github.com/jmespath/go-jmespath",
"revision": "bd40a432e4c76585ef6b72d3fd96fb9b6dc7b68d",