summaryrefslogtreecommitdiff
path: root/vendor/github.com/aws
diff options
context:
space:
mode:
authorDave Henderson <dhenderson@gmail.com>2017-08-02 22:26:03 -0400
committerDave Henderson <dhenderson@gmail.com>2017-08-02 22:36:43 -0400
commitd874842c9fe228083539b5ac729daecb00996bbf (patch)
treee225225d6b50f1e4df67561b08960be0767f861e /vendor/github.com/aws
parent673ff7f228b902b20fe3cf2d0c69c07f1256599b (diff)
Migrate from glide to dep
Signed-off-by: Dave Henderson <dhenderson@gmail.com>
Diffstat (limited to 'vendor/github.com/aws')
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/LICENSE27
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/PATENTS22
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go624
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/imports.go400
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/util.go14
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/allpackages.go195
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/fakecontext.go108
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/tags.go75
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/util.go167
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo.go209
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go39
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/doc.go205
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/go16.go13
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/loader.go1059
-rw-r--r--vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/util.go124
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.gitignore5
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.travis.yml17
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/LICENSE191
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/Makefile12
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README.md740
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README_ZH.md727
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/error.go32
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini.go549
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini_test.go449
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key.go703
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key_test.go573
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser.go358
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser_test.go42
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section.go234
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section_test.go75
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct.go450
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct_test.go337
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-BE-BOM.inibin56 -> 0 bytes
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-LE-BOM.inibin56 -> 0 bytes
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-8-BOM.ini2
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/aicc.ini11
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/conf.ini2
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.gitignore4
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.travis.yml9
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/LICENSE13
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/Makefile44
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/README.md7
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api.go49
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api_test.go32
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/astnodetype_string.go16
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/basic.json96
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/boolean.json257
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/current.json25
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/escape.json46
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/filters.json468
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/functions.json825
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/identifiers.json1377
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/indices.json346
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/literal.json185
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/multiselect.json393
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/ormatch.json59
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/pipe.json131
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/slice.json187
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/syntax.json616
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/unicode.json38
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/wildcard.json460
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance_test.go123
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/functions.go842
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/fuzz/jmespath.go13
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter.go418
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter_test.go221
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer.go420
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer_test.go161
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser.go603
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser_test.go136
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/toktype_string.go16
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util.go185
-rw-r--r--vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util_test.go73
73 files changed, 0 insertions, 17684 deletions
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/LICENSE b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/LICENSE
deleted file mode 100644
index 6a66aea5..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/PATENTS b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/PATENTS
deleted file mode 100644
index 73309904..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/PATENTS
+++ /dev/null
@@ -1,22 +0,0 @@
-Additional IP Rights Grant (Patents)
-
-"This implementation" means the copyrightable works distributed by
-Google as part of the Go project.
-
-Google hereby grants to You a perpetual, worldwide, non-exclusive,
-no-charge, royalty-free, irrevocable (except as stated in this section)
-patent license to make, have made, use, offer to sell, sell, import,
-transfer and otherwise run, modify and propagate the contents of this
-implementation of Go, where such license applies only to those patent
-claims, both currently owned or controlled by Google and acquired in
-the future, licensable by Google that are necessarily infringed by this
-implementation of Go. This grant does not include claims that would be
-infringed only as a consequence of further modification of this
-implementation. If you or your agent or exclusive licensee institute or
-order or agree to the institution of patent litigation against any
-entity (including a cross-claim or counterclaim in a lawsuit) alleging
-that this implementation of Go or any code incorporated within this
-implementation of Go constitutes direct or contributory patent
-infringement, or inducement of patent infringement, then any patent
-rights granted to you under this License for this implementation of Go
-shall terminate as of the date such litigation is filed.
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
deleted file mode 100644
index 340c9e6c..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
+++ /dev/null
@@ -1,624 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package astutil
-
-// This file defines utilities for working with source positions.
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "sort"
-)
-
-// PathEnclosingInterval returns the node that encloses the source
-// interval [start, end), and all its ancestors up to the AST root.
-//
-// The definition of "enclosing" used by this function considers
-// additional whitespace abutting a node to be enclosed by it.
-// In this example:
-//
-// z := x + y // add them
-// <-A->
-// <----B----->
-//
-// the ast.BinaryExpr(+) node is considered to enclose interval B
-// even though its [Pos()..End()) is actually only interval A.
-// This behaviour makes user interfaces more tolerant of imperfect
-// input.
-//
-// This function treats tokens as nodes, though they are not included
-// in the result. e.g. PathEnclosingInterval("+") returns the
-// enclosing ast.BinaryExpr("x + y").
-//
-// If start==end, the 1-char interval following start is used instead.
-//
-// The 'exact' result is true if the interval contains only path[0]
-// and perhaps some adjacent whitespace. It is false if the interval
-// overlaps multiple children of path[0], or if it contains only
-// interior whitespace of path[0].
-// In this example:
-//
-// z := x + y // add them
-// <--C--> <---E-->
-// ^
-// D
-//
-// intervals C, D and E are inexact. C is contained by the
-// z-assignment statement, because it spans three of its children (:=,
-// x, +). So too is the 1-char interval D, because it contains only
-// interior whitespace of the assignment. E is considered interior
-// whitespace of the BlockStmt containing the assignment.
-//
-// Precondition: [start, end) both lie within the same file as root.
-// TODO(adonovan): return (nil, false) in this case and remove precond.
-// Requires FileSet; see loader.tokenFileContainsPos.
-//
-// Postcondition: path is never nil; it always contains at least 'root'.
-//
-func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
- // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
-
- // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
- var visit func(node ast.Node) bool
- visit = func(node ast.Node) bool {
- path = append(path, node)
-
- nodePos := node.Pos()
- nodeEnd := node.End()
-
- // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
-
- // Intersect [start, end) with interval of node.
- if start < nodePos {
- start = nodePos
- }
- if end > nodeEnd {
- end = nodeEnd
- }
-
- // Find sole child that contains [start, end).
- children := childrenOf(node)
- l := len(children)
- for i, child := range children {
- // [childPos, childEnd) is unaugmented interval of child.
- childPos := child.Pos()
- childEnd := child.End()
-
- // [augPos, augEnd) is whitespace-augmented interval of child.
- augPos := childPos
- augEnd := childEnd
- if i > 0 {
- augPos = children[i-1].End() // start of preceding whitespace
- }
- if i < l-1 {
- nextChildPos := children[i+1].Pos()
- // Does [start, end) lie between child and next child?
- if start >= augEnd && end <= nextChildPos {
- return false // inexact match
- }
- augEnd = nextChildPos // end of following whitespace
- }
-
- // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
- // i, augPos, augEnd, start, end) // debugging
-
- // Does augmented child strictly contain [start, end)?
- if augPos <= start && end <= augEnd {
- _, isToken := child.(tokenNode)
- return isToken || visit(child)
- }
-
- // Does [start, end) overlap multiple children?
- // i.e. left-augmented child contains start
- // but LR-augmented child does not contain end.
- if start < childEnd && end > augEnd {
- break
- }
- }
-
- // No single child contained [start, end),
- // so node is the result. Is it exact?
-
- // (It's tempting to put this condition before the
- // child loop, but it gives the wrong result in the
- // case where a node (e.g. ExprStmt) and its sole
- // child have equal intervals.)
- if start == nodePos && end == nodeEnd {
- return true // exact match
- }
-
- return false // inexact: overlaps multiple children
- }
-
- if start > end {
- start, end = end, start
- }
-
- if start < root.End() && end > root.Pos() {
- if start == end {
- end = start + 1 // empty interval => interval of size 1
- }
- exact = visit(root)
-
- // Reverse the path:
- for i, l := 0, len(path); i < l/2; i++ {
- path[i], path[l-1-i] = path[l-1-i], path[i]
- }
- } else {
- // Selection lies within whitespace preceding the
- // first (or following the last) declaration in the file.
- // The result nonetheless always includes the ast.File.
- path = append(path, root)
- }
-
- return
-}
-
-// tokenNode is a dummy implementation of ast.Node for a single token.
-// They are used transiently by PathEnclosingInterval but never escape
-// this package.
-//
-type tokenNode struct {
- pos token.Pos
- end token.Pos
-}
-
-func (n tokenNode) Pos() token.Pos {
- return n.pos
-}
-
-func (n tokenNode) End() token.Pos {
- return n.end
-}
-
-func tok(pos token.Pos, len int) ast.Node {
- return tokenNode{pos, pos + token.Pos(len)}
-}
-
-// childrenOf returns the direct non-nil children of ast.Node n.
-// It may include fake ast.Node implementations for bare tokens.
-// it is not safe to call (e.g.) ast.Walk on such nodes.
-//
-func childrenOf(n ast.Node) []ast.Node {
- var children []ast.Node
-
- // First add nodes for all true subtrees.
- ast.Inspect(n, func(node ast.Node) bool {
- if node == n { // push n
- return true // recur
- }
- if node != nil { // push child
- children = append(children, node)
- }
- return false // no recursion
- })
-
- // Then add fake Nodes for bare tokens.
- switch n := n.(type) {
- case *ast.ArrayType:
- children = append(children,
- tok(n.Lbrack, len("[")),
- tok(n.Elt.End(), len("]")))
-
- case *ast.AssignStmt:
- children = append(children,
- tok(n.TokPos, len(n.Tok.String())))
-
- case *ast.BasicLit:
- children = append(children,
- tok(n.ValuePos, len(n.Value)))
-
- case *ast.BinaryExpr:
- children = append(children, tok(n.OpPos, len(n.Op.String())))
-
- case *ast.BlockStmt:
- children = append(children,
- tok(n.Lbrace, len("{")),
- tok(n.Rbrace, len("}")))
-
- case *ast.BranchStmt:
- children = append(children,
- tok(n.TokPos, len(n.Tok.String())))
-
- case *ast.CallExpr:
- children = append(children,
- tok(n.Lparen, len("(")),
- tok(n.Rparen, len(")")))
- if n.Ellipsis != 0 {
- children = append(children, tok(n.Ellipsis, len("...")))
- }
-
- case *ast.CaseClause:
- if n.List == nil {
- children = append(children,
- tok(n.Case, len("default")))
- } else {
- children = append(children,
- tok(n.Case, len("case")))
- }
- children = append(children, tok(n.Colon, len(":")))
-
- case *ast.ChanType:
- switch n.Dir {
- case ast.RECV:
- children = append(children, tok(n.Begin, len("<-chan")))
- case ast.SEND:
- children = append(children, tok(n.Begin, len("chan<-")))
- case ast.RECV | ast.SEND:
- children = append(children, tok(n.Begin, len("chan")))
- }
-
- case *ast.CommClause:
- if n.Comm == nil {
- children = append(children,
- tok(n.Case, len("default")))
- } else {
- children = append(children,
- tok(n.Case, len("case")))
- }
- children = append(children, tok(n.Colon, len(":")))
-
- case *ast.Comment:
- // nop
-
- case *ast.CommentGroup:
- // nop
-
- case *ast.CompositeLit:
- children = append(children,
- tok(n.Lbrace, len("{")),
- tok(n.Rbrace, len("{")))
-
- case *ast.DeclStmt:
- // nop
-
- case *ast.DeferStmt:
- children = append(children,
- tok(n.Defer, len("defer")))
-
- case *ast.Ellipsis:
- children = append(children,
- tok(n.Ellipsis, len("...")))
-
- case *ast.EmptyStmt:
- // nop
-
- case *ast.ExprStmt:
- // nop
-
- case *ast.Field:
- // TODO(adonovan): Field.{Doc,Comment,Tag}?
-
- case *ast.FieldList:
- children = append(children,
- tok(n.Opening, len("(")),
- tok(n.Closing, len(")")))
-
- case *ast.File:
- // TODO test: Doc
- children = append(children,
- tok(n.Package, len("package")))
-
- case *ast.ForStmt:
- children = append(children,
- tok(n.For, len("for")))
-
- case *ast.FuncDecl:
- // TODO(adonovan): FuncDecl.Comment?
-
- // Uniquely, FuncDecl breaks the invariant that
- // preorder traversal yields tokens in lexical order:
- // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
- //
- // As a workaround, we inline the case for FuncType
- // here and order things correctly.
- //
- children = nil // discard ast.Walk(FuncDecl) info subtrees
- children = append(children, tok(n.Type.Func, len("func")))
- if n.Recv != nil {
- children = append(children, n.Recv)
- }
- children = append(children, n.Name)
- if n.Type.Params != nil {
- children = append(children, n.Type.Params)
- }
- if n.Type.Results != nil {
- children = append(children, n.Type.Results)
- }
- if n.Body != nil {
- children = append(children, n.Body)
- }
-
- case *ast.FuncLit:
- // nop
-
- case *ast.FuncType:
- if n.Func != 0 {
- children = append(children,
- tok(n.Func, len("func")))
- }
-
- case *ast.GenDecl:
- children = append(children,
- tok(n.TokPos, len(n.Tok.String())))
- if n.Lparen != 0 {
- children = append(children,
- tok(n.Lparen, len("(")),
- tok(n.Rparen, len(")")))
- }
-
- case *ast.GoStmt:
- children = append(children,
- tok(n.Go, len("go")))
-
- case *ast.Ident:
- children = append(children,
- tok(n.NamePos, len(n.Name)))
-
- case *ast.IfStmt:
- children = append(children,
- tok(n.If, len("if")))
-
- case *ast.ImportSpec:
- // TODO(adonovan): ImportSpec.{Doc,EndPos}?
-
- case *ast.IncDecStmt:
- children = append(children,
- tok(n.TokPos, len(n.Tok.String())))
-
- case *ast.IndexExpr:
- children = append(children,
- tok(n.Lbrack, len("{")),
- tok(n.Rbrack, len("}")))
-
- case *ast.InterfaceType:
- children = append(children,
- tok(n.Interface, len("interface")))
-
- case *ast.KeyValueExpr:
- children = append(children,
- tok(n.Colon, len(":")))
-
- case *ast.LabeledStmt:
- children = append(children,
- tok(n.Colon, len(":")))
-
- case *ast.MapType:
- children = append(children,
- tok(n.Map, len("map")))
-
- case *ast.ParenExpr:
- children = append(children,
- tok(n.Lparen, len("(")),
- tok(n.Rparen, len(")")))
-
- case *ast.RangeStmt:
- children = append(children,
- tok(n.For, len("for")),
- tok(n.TokPos, len(n.Tok.String())))
-
- case *ast.ReturnStmt:
- children = append(children,
- tok(n.Return, len("return")))
-
- case *ast.SelectStmt:
- children = append(children,
- tok(n.Select, len("select")))
-
- case *ast.SelectorExpr:
- // nop
-
- case *ast.SendStmt:
- children = append(children,
- tok(n.Arrow, len("<-")))
-
- case *ast.SliceExpr:
- children = append(children,
- tok(n.Lbrack, len("[")),
- tok(n.Rbrack, len("]")))
-
- case *ast.StarExpr:
- children = append(children, tok(n.Star, len("*")))
-
- case *ast.StructType:
- children = append(children, tok(n.Struct, len("struct")))
-
- case *ast.SwitchStmt:
- children = append(children, tok(n.Switch, len("switch")))
-
- case *ast.TypeAssertExpr:
- children = append(children,
- tok(n.Lparen-1, len(".")),
- tok(n.Lparen, len("(")),
- tok(n.Rparen, len(")")))
-
- case *ast.TypeSpec:
- // TODO(adonovan): TypeSpec.{Doc,Comment}?
-
- case *ast.TypeSwitchStmt:
- children = append(children, tok(n.Switch, len("switch")))
-
- case *ast.UnaryExpr:
- children = append(children, tok(n.OpPos, len(n.Op.String())))
-
- case *ast.ValueSpec:
- // TODO(adonovan): ValueSpec.{Doc,Comment}?
-
- case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
- // nop
- }
-
- // TODO(adonovan): opt: merge the logic of ast.Inspect() into
- // the switch above so we can make interleaved callbacks for
- // both Nodes and Tokens in the right order and avoid the need
- // to sort.
- sort.Sort(byPos(children))
-
- return children
-}
-
-type byPos []ast.Node
-
-func (sl byPos) Len() int {
- return len(sl)
-}
-func (sl byPos) Less(i, j int) bool {
- return sl[i].Pos() < sl[j].Pos()
-}
-func (sl byPos) Swap(i, j int) {
- sl[i], sl[j] = sl[j], sl[i]
-}
-
-// NodeDescription returns a description of the concrete type of n suitable
-// for a user interface.
-//
-// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
-// StarExpr) we could be much more specific given the path to the AST
-// root. Perhaps we should do that.
-//
-func NodeDescription(n ast.Node) string {
- switch n := n.(type) {
- case *ast.ArrayType:
- return "array type"
- case *ast.AssignStmt:
- return "assignment"
- case *ast.BadDecl:
- return "bad declaration"
- case *ast.BadExpr:
- return "bad expression"
- case *ast.BadStmt:
- return "bad statement"
- case *ast.BasicLit:
- return "basic literal"
- case *ast.BinaryExpr:
- return fmt.Sprintf("binary %s operation", n.Op)
- case *ast.BlockStmt:
- return "block"
- case *ast.BranchStmt:
- switch n.Tok {
- case token.BREAK:
- return "break statement"
- case token.CONTINUE:
- return "continue statement"
- case token.GOTO:
- return "goto statement"
- case token.FALLTHROUGH:
- return "fall-through statement"
- }
- case *ast.CallExpr:
- return "function call (or conversion)"
- case *ast.CaseClause:
- return "case clause"
- case *ast.ChanType:
- return "channel type"
- case *ast.CommClause:
- return "communication clause"
- case *ast.Comment:
- return "comment"
- case *ast.CommentGroup:
- return "comment group"
- case *ast.CompositeLit:
- return "composite literal"
- case *ast.DeclStmt:
- return NodeDescription(n.Decl) + " statement"
- case *ast.DeferStmt:
- return "defer statement"
- case *ast.Ellipsis:
- return "ellipsis"
- case *ast.EmptyStmt:
- return "empty statement"
- case *ast.ExprStmt:
- return "expression statement"
- case *ast.Field:
- // Can be any of these:
- // struct {x, y int} -- struct field(s)
- // struct {T} -- anon struct field
- // interface {I} -- interface embedding
- // interface {f()} -- interface method
- // func (A) func(B) C -- receiver, param(s), result(s)
- return "field/method/parameter"
- case *ast.FieldList:
- return "field/method/parameter list"
- case *ast.File:
- return "source file"
- case *ast.ForStmt:
- return "for loop"
- case *ast.FuncDecl:
- return "function declaration"
- case *ast.FuncLit:
- return "function literal"
- case *ast.FuncType:
- return "function type"
- case *ast.GenDecl:
- switch n.Tok {
- case token.IMPORT:
- return "import declaration"
- case token.CONST:
- return "constant declaration"
- case token.TYPE:
- return "type declaration"
- case token.VAR:
- return "variable declaration"
- }
- case *ast.GoStmt:
- return "go statement"
- case *ast.Ident:
- return "identifier"
- case *ast.IfStmt:
- return "if statement"
- case *ast.ImportSpec:
- return "import specification"
- case *ast.IncDecStmt:
- if n.Tok == token.INC {
- return "increment statement"
- }
- return "decrement statement"
- case *ast.IndexExpr:
- return "index expression"
- case *ast.InterfaceType:
- return "interface type"
- case *ast.KeyValueExpr:
- return "key/value association"
- case *ast.LabeledStmt:
- return "statement label"
- case *ast.MapType:
- return "map type"
- case *ast.Package:
- return "package"
- case *ast.ParenExpr:
- return "parenthesized " + NodeDescription(n.X)
- case *ast.RangeStmt:
- return "range loop"
- case *ast.ReturnStmt:
- return "return statement"
- case *ast.SelectStmt:
- return "select statement"
- case *ast.SelectorExpr:
- return "selector"
- case *ast.SendStmt:
- return "channel send"
- case *ast.SliceExpr:
- return "slice expression"
- case *ast.StarExpr:
- return "*-operation" // load/store expr or pointer type
- case *ast.StructType:
- return "struct type"
- case *ast.SwitchStmt:
- return "switch statement"
- case *ast.TypeAssertExpr:
- return "type assertion"
- case *ast.TypeSpec:
- return "type specification"
- case *ast.TypeSwitchStmt:
- return "type switch"
- case *ast.UnaryExpr:
- return fmt.Sprintf("unary %s operation", n.Op)
- case *ast.ValueSpec:
- return "value specification"
-
- }
- panic(fmt.Sprintf("unexpected node type: %T", n))
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/imports.go
deleted file mode 100644
index a47bcfa3..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/imports.go
+++ /dev/null
@@ -1,400 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package astutil contains common utilities for working with the Go AST.
-package astutil
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "strconv"
- "strings"
-)
-
-// AddImport adds the import path to the file f, if absent.
-func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
- return AddNamedImport(fset, f, "", ipath)
-}
-
-// AddNamedImport adds the import path to the file f, if absent.
-// If name is not empty, it is used to rename the import.
-//
-// For example, calling
-// AddNamedImport(fset, f, "pathpkg", "path")
-// adds
-// import pathpkg "path"
-func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
- if imports(f, ipath) {
- return false
- }
-
- newImport := &ast.ImportSpec{
- Path: &ast.BasicLit{
- Kind: token.STRING,
- Value: strconv.Quote(ipath),
- },
- }
- if name != "" {
- newImport.Name = &ast.Ident{Name: name}
- }
-
- // Find an import decl to add to.
- // The goal is to find an existing import
- // whose import path has the longest shared
- // prefix with ipath.
- var (
- bestMatch = -1 // length of longest shared prefix
- lastImport = -1 // index in f.Decls of the file's final import decl
- impDecl *ast.GenDecl // import decl containing the best match
- impIndex = -1 // spec index in impDecl containing the best match
- )
- for i, decl := range f.Decls {
- gen, ok := decl.(*ast.GenDecl)
- if ok && gen.Tok == token.IMPORT {
- lastImport = i
- // Do not add to import "C", to avoid disrupting the
- // association with its doc comment, breaking cgo.
- if declImports(gen, "C") {
- continue
- }
-
- // Match an empty import decl if that's all that is available.
- if len(gen.Specs) == 0 && bestMatch == -1 {
- impDecl = gen
- }
-
- // Compute longest shared prefix with imports in this group.
- for j, spec := range gen.Specs {
- impspec := spec.(*ast.ImportSpec)
- n := matchLen(importPath(impspec), ipath)
- if n > bestMatch {
- bestMatch = n
- impDecl = gen
- impIndex = j
- }
- }
- }
- }
-
- // If no import decl found, add one after the last import.
- if impDecl == nil {
- impDecl = &ast.GenDecl{
- Tok: token.IMPORT,
- }
- if lastImport >= 0 {
- impDecl.TokPos = f.Decls[lastImport].End()
- } else {
- // There are no existing imports.
- // Our new import goes after the package declaration and after
- // the comment, if any, that starts on the same line as the
- // package declaration.
- impDecl.TokPos = f.Package
-
- file := fset.File(f.Package)
- pkgLine := file.Line(f.Package)
- for _, c := range f.Comments {
- if file.Line(c.Pos()) > pkgLine {
- break
- }
- impDecl.TokPos = c.End()
- }
- }
- f.Decls = append(f.Decls, nil)
- copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
- f.Decls[lastImport+1] = impDecl
- }
-
- // Insert new import at insertAt.
- insertAt := 0
- if impIndex >= 0 {
- // insert after the found import
- insertAt = impIndex + 1
- }
- impDecl.Specs = append(impDecl.Specs, nil)
- copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
- impDecl.Specs[insertAt] = newImport
- pos := impDecl.Pos()
- if insertAt > 0 {
- // If there is a comment after an existing import, preserve the comment
- // position by adding the new import after the comment.
- if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
- pos = spec.Comment.End()
- } else {
- // Assign same position as the previous import,
- // so that the sorter sees it as being in the same block.
- pos = impDecl.Specs[insertAt-1].Pos()
- }
- }
- if newImport.Name != nil {
- newImport.Name.NamePos = pos
- }
- newImport.Path.ValuePos = pos
- newImport.EndPos = pos
-
- // Clean up parens. impDecl contains at least one spec.
- if len(impDecl.Specs) == 1 {
- // Remove unneeded parens.
- impDecl.Lparen = token.NoPos
- } else if !impDecl.Lparen.IsValid() {
- // impDecl needs parens added.
- impDecl.Lparen = impDecl.Specs[0].Pos()
- }
-
- f.Imports = append(f.Imports, newImport)
-
- if len(f.Decls) <= 1 {
- return true
- }
-
- // Merge all the import declarations into the first one.
- var first *ast.GenDecl
- for i, decl := range f.Decls {
- gen, ok := decl.(*ast.GenDecl)
- if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
- continue
- }
- if first == nil {
- first = gen
- continue // Don't touch the first one.
- }
- // Move the imports of the other import declaration to the first one.
- for _, spec := range gen.Specs {
- spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
- first.Specs = append(first.Specs, spec)
- }
- f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
- }
-
- return true
-}
-
-// DeleteImport deletes the import path from the file f, if present.
-func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
- return DeleteNamedImport(fset, f, "", path)
-}
-
-// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
-func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
- var delspecs []*ast.ImportSpec
-
- // Find the import nodes that import path, if any.
- for i := 0; i < len(f.Decls); i++ {
- decl := f.Decls[i]
- gen, ok := decl.(*ast.GenDecl)
- if !ok || gen.Tok != token.IMPORT {
- continue
- }
- for j := 0; j < len(gen.Specs); j++ {
- spec := gen.Specs[j]
- impspec := spec.(*ast.ImportSpec)
- if impspec.Name == nil && name != "" {
- continue
- }
- if impspec.Name != nil && impspec.Name.Name != name {
- continue
- }
- if importPath(impspec) != path {
- continue
- }
-
- // We found an import spec that imports path.
- // Delete it.
- delspecs = append(delspecs, impspec)
- deleted = true
- copy(gen.Specs[j:], gen.Specs[j+1:])
- gen.Specs = gen.Specs[:len(gen.Specs)-1]
-
- // If this was the last import spec in this decl,
- // delete the decl, too.
- if len(gen.Specs) == 0 {
- copy(f.Decls[i:], f.Decls[i+1:])
- f.Decls = f.Decls[:len(f.Decls)-1]
- i--
- break
- } else if len(gen.Specs) == 1 {
- gen.Lparen = token.NoPos // drop parens
- }
- if j > 0 {
- lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
- lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
- line := fset.Position(impspec.Path.ValuePos).Line
-
- // We deleted an entry but now there may be
- // a blank line-sized hole where the import was.
- if line-lastLine > 1 {
- // There was a blank line immediately preceding the deleted import,
- // so there's no need to close the hole.
- // Do nothing.
- } else {
- // There was no blank line. Close the hole.
- fset.File(gen.Rparen).MergeLine(line)
- }
- }
- j--
- }
- }
-
- // Delete them from f.Imports.
- for i := 0; i < len(f.Imports); i++ {
- imp := f.Imports[i]
- for j, del := range delspecs {
- if imp == del {
- copy(f.Imports[i:], f.Imports[i+1:])
- f.Imports = f.Imports[:len(f.Imports)-1]
- copy(delspecs[j:], delspecs[j+1:])
- delspecs = delspecs[:len(delspecs)-1]
- i--
- break
- }
- }
- }
-
- if len(delspecs) > 0 {
- panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
- }
-
- return
-}
-
-// RewriteImport rewrites any import of path oldPath to path newPath.
-func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
- for _, imp := range f.Imports {
- if importPath(imp) == oldPath {
- rewrote = true
- // record old End, because the default is to compute
- // it using the length of imp.Path.Value.
- imp.EndPos = imp.End()
- imp.Path.Value = strconv.Quote(newPath)
- }
- }
- return
-}
-
-// UsesImport reports whether a given import is used.
-func UsesImport(f *ast.File, path string) (used bool) {
- spec := importSpec(f, path)
- if spec == nil {
- return
- }
-
- name := spec.Name.String()
- switch name {
- case "<nil>":
- // If the package name is not explicitly specified,
- // make an educated guess. This is not guaranteed to be correct.
- lastSlash := strings.LastIndex(path, "/")
- if lastSlash == -1 {
- name = path
- } else {
- name = path[lastSlash+1:]
- }
- case "_", ".":
- // Not sure if this import is used - err on the side of caution.
- return true
- }
-
- ast.Walk(visitFn(func(n ast.Node) {
- sel, ok := n.(*ast.SelectorExpr)
- if ok && isTopName(sel.X, name) {
- used = true
- }
- }), f)
-
- return
-}
-
-type visitFn func(node ast.Node)
-
-func (fn visitFn) Visit(node ast.Node) ast.Visitor {
- fn(node)
- return fn
-}
-
-// imports returns true if f imports path.
-func imports(f *ast.File, path string) bool {
- return importSpec(f, path) != nil
-}
-
-// importSpec returns the import spec if f imports path,
-// or nil otherwise.
-func importSpec(f *ast.File, path string) *ast.ImportSpec {
- for _, s := range f.Imports {
- if importPath(s) == path {
- return s
- }
- }
- return nil
-}
-
-// importPath returns the unquoted import path of s,
-// or "" if the path is not properly quoted.
-func importPath(s *ast.ImportSpec) string {
- t, err := strconv.Unquote(s.Path.Value)
- if err == nil {
- return t
- }
- return ""
-}
-
-// declImports reports whether gen contains an import of path.
-func declImports(gen *ast.GenDecl, path string) bool {
- if gen.Tok != token.IMPORT {
- return false
- }
- for _, spec := range gen.Specs {
- impspec := spec.(*ast.ImportSpec)
- if importPath(impspec) == path {
- return true
- }
- }
- return false
-}
-
-// matchLen returns the length of the longest path segment prefix shared by x and y.
-func matchLen(x, y string) int {
- n := 0
- for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
- if x[i] == '/' {
- n++
- }
- }
- return n
-}
-
-// isTopName returns true if n is a top-level unresolved identifier with the given name.
-func isTopName(n ast.Expr, name string) bool {
- id, ok := n.(*ast.Ident)
- return ok && id.Name == name && id.Obj == nil
-}
-
-// Imports returns the file imports grouped by paragraph.
-func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
- var groups [][]*ast.ImportSpec
-
- for _, decl := range f.Decls {
- genDecl, ok := decl.(*ast.GenDecl)
- if !ok || genDecl.Tok != token.IMPORT {
- break
- }
-
- group := []*ast.ImportSpec{}
-
- var lastLine int
- for _, spec := range genDecl.Specs {
- importSpec := spec.(*ast.ImportSpec)
- pos := importSpec.Path.ValuePos
- line := fset.Position(pos).Line
- if lastLine > 0 && pos > 0 && line-lastLine > 1 {
- groups = append(groups, group)
- group = []*ast.ImportSpec{}
- }
- group = append(group, importSpec)
- lastLine = line
- }
- groups = append(groups, group)
- }
-
- return groups
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/util.go
deleted file mode 100644
index 76306298..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/ast/astutil/util.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package astutil
-
-import "go/ast"
-
-// Unparen returns e with any enclosing parentheses stripped.
-func Unparen(e ast.Expr) ast.Expr {
- for {
- p, ok := e.(*ast.ParenExpr)
- if !ok {
- return e
- }
- e = p.X
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/allpackages.go
deleted file mode 100644
index 30208095..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/allpackages.go
+++ /dev/null
@@ -1,195 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package buildutil provides utilities related to the go/build
-// package in the standard library.
-//
-// All I/O is done via the build.Context file system interface, which must
-// be concurrency-safe.
-package buildutil
-
-import (
- "go/build"
- "os"
- "path/filepath"
- "sort"
- "strings"
- "sync"
-)
-
-// AllPackages returns the package path of each Go package in any source
-// directory of the specified build context (e.g. $GOROOT or an element
-// of $GOPATH). Errors are ignored. The results are sorted.
-// All package paths are canonical, and thus may contain "/vendor/".
-//
-// The result may include import paths for directories that contain no
-// *.go files, such as "archive" (in $GOROOT/src).
-//
-// All I/O is done via the build.Context file system interface,
-// which must be concurrency-safe.
-//
-func AllPackages(ctxt *build.Context) []string {
- var list []string
- ForEachPackage(ctxt, func(pkg string, _ error) {
- list = append(list, pkg)
- })
- sort.Strings(list)
- return list
-}
-
-// ForEachPackage calls the found function with the package path of
-// each Go package it finds in any source directory of the specified
-// build context (e.g. $GOROOT or an element of $GOPATH).
-// All package paths are canonical, and thus may contain "/vendor/".
-//
-// If the package directory exists but could not be read, the second
-// argument to the found function provides the error.
-//
-// All I/O is done via the build.Context file system interface,
-// which must be concurrency-safe.
-//
-func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
- ch := make(chan item)
-
- var wg sync.WaitGroup
- for _, root := range ctxt.SrcDirs() {
- root := root
- wg.Add(1)
- go func() {
- allPackages(ctxt, root, ch)
- wg.Done()
- }()
- }
- go func() {
- wg.Wait()
- close(ch)
- }()
-
- // All calls to found occur in the caller's goroutine.
- for i := range ch {
- found(i.importPath, i.err)
- }
-}
-
-type item struct {
- importPath string
- err error // (optional)
-}
-
-// We use a process-wide counting semaphore to limit
-// the number of parallel calls to ReadDir.
-var ioLimit = make(chan bool, 20)
-
-func allPackages(ctxt *build.Context, root string, ch chan<- item) {
- root = filepath.Clean(root) + string(os.PathSeparator)
-
- var wg sync.WaitGroup
-
- var walkDir func(dir string)
- walkDir = func(dir string) {
- // Avoid .foo, _foo, and testdata directory trees.
- base := filepath.Base(dir)
- if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
- return
- }
-
- pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))
-
- // Prune search if we encounter any of these import paths.
- switch pkg {
- case "builtin":
- return
- }
-
- ioLimit <- true
- files, err := ReadDir(ctxt, dir)
- <-ioLimit
- if pkg != "" || err != nil {
- ch <- item{pkg, err}
- }
- for _, fi := range files {
- fi := fi
- if fi.IsDir() {
- wg.Add(1)
- go func() {
- walkDir(filepath.Join(dir, fi.Name()))
- wg.Done()
- }()
- }
- }
- }
-
- walkDir(root)
- wg.Wait()
-}
-
-// ExpandPatterns returns the set of packages matched by patterns,
-// which may have the following forms:
-//
-// golang.org/x/tools/cmd/guru # a single package
-// golang.org/x/tools/... # all packages beneath dir
-// ... # the entire workspace.
-//
-// Order is significant: a pattern preceded by '-' removes matching
-// packages from the set. For example, these patterns match all encoding
-// packages except encoding/xml:
-//
-// encoding/... -encoding/xml
-//
-func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
- // TODO(adonovan): support other features of 'go list':
- // - "std"/"cmd"/"all" meta-packages
- // - "..." not at the end of a pattern
- // - relative patterns using "./" or "../" prefix
-
- pkgs := make(map[string]bool)
- doPkg := func(pkg string, neg bool) {
- if neg {
- delete(pkgs, pkg)
- } else {
- pkgs[pkg] = true
- }
- }
-
- // Scan entire workspace if wildcards are present.
- // TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
- var all []string
- for _, arg := range patterns {
- if strings.HasSuffix(arg, "...") {
- all = AllPackages(ctxt)
- break
- }
- }
-
- for _, arg := range patterns {
- if arg == "" {
- continue
- }
-
- neg := arg[0] == '-'
- if neg {
- arg = arg[1:]
- }
-
- if arg == "..." {
- // ... matches all packages
- for _, pkg := range all {
- doPkg(pkg, neg)
- }
- } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
- // dir/... matches all packages beneath dir
- for _, pkg := range all {
- if strings.HasPrefix(pkg, dir) &&
- (len(pkg) == len(dir) || pkg[len(dir)] == '/') {
- doPkg(pkg, neg)
- }
- }
- } else {
- // single package
- doPkg(arg, neg)
- }
- }
-
- return pkgs
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
deleted file mode 100644
index 24cbcbea..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package buildutil
-
-import (
- "fmt"
- "go/build"
- "io"
- "io/ioutil"
- "os"
- "path"
- "path/filepath"
- "sort"
- "strings"
- "time"
-)
-
-// FakeContext returns a build.Context for the fake file tree specified
-// by pkgs, which maps package import paths to a mapping from file base
-// names to contents.
-//
-// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
-// the necessary file access methods to read from memory instead of the
-// real file system.
-//
-// Unlike a real file tree, the fake one has only two levels---packages
-// and files---so ReadDir("/go/src/") returns all packages under
-// /go/src/ including, for instance, "math" and "math/big".
-// ReadDir("/go/src/math/big") would return all the files in the
-// "math/big" package.
-//
-func FakeContext(pkgs map[string]map[string]string) *build.Context {
- clean := func(filename string) string {
- f := path.Clean(filepath.ToSlash(filename))
- // Removing "/go/src" while respecting segment
- // boundaries has this unfortunate corner case:
- if f == "/go/src" {
- return ""
- }
- return strings.TrimPrefix(f, "/go/src/")
- }
-
- ctxt := build.Default // copy
- ctxt.GOROOT = "/go"
- ctxt.GOPATH = ""
- ctxt.IsDir = func(dir string) bool {
- dir = clean(dir)
- if dir == "" {
- return true // needed by (*build.Context).SrcDirs
- }
- return pkgs[dir] != nil
- }
- ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
- dir = clean(dir)
- var fis []os.FileInfo
- if dir == "" {
- // enumerate packages
- for importPath := range pkgs {
- fis = append(fis, fakeDirInfo(importPath))
- }
- } else {
- // enumerate files of package
- for basename := range pkgs[dir] {
- fis = append(fis, fakeFileInfo(basename))
- }
- }
- sort.Sort(byName(fis))
- return fis, nil
- }
- ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
- filename = clean(filename)
- dir, base := path.Split(filename)
- content, ok := pkgs[path.Clean(dir)][base]
- if !ok {
- return nil, fmt.Errorf("file not found: %s", filename)
- }
- return ioutil.NopCloser(strings.NewReader(content)), nil
- }
- ctxt.IsAbsPath = func(path string) bool {
- path = filepath.ToSlash(path)
- // Don't rely on the default (filepath.Path) since on
- // Windows, it reports virtual paths as non-absolute.
- return strings.HasPrefix(path, "/")
- }
- return &ctxt
-}
-
-type byName []os.FileInfo
-
-func (s byName) Len() int { return len(s) }
-func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
-
-type fakeFileInfo string
-
-func (fi fakeFileInfo) Name() string { return string(fi) }
-func (fakeFileInfo) Sys() interface{} { return nil }
-func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
-func (fakeFileInfo) IsDir() bool { return false }
-func (fakeFileInfo) Size() int64 { return 0 }
-func (fakeFileInfo) Mode() os.FileMode { return 0644 }
-
-type fakeDirInfo string
-
-func (fd fakeDirInfo) Name() string { return string(fd) }
-func (fakeDirInfo) Sys() interface{} { return nil }
-func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
-func (fakeDirInfo) IsDir() bool { return true }
-func (fakeDirInfo) Size() int64 { return 0 }
-func (fakeDirInfo) Mode() os.FileMode { return 0755 }
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/tags.go
deleted file mode 100644
index 486606f3..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/tags.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package buildutil
-
-// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go.
-
-import "fmt"
-
-const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
- "For more information about build tags, see the description of " +
- "build constraints in the documentation for the go/build package"
-
-// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
-// a flag value in the same manner as go build's -tags flag and
-// populates a []string slice.
-//
-// See $GOROOT/src/go/build/doc.go for description of build tags.
-// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
-//
-// Example:
-// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
-type TagsFlag []string
-
-func (v *TagsFlag) Set(s string) error {
- var err error
- *v, err = splitQuotedFields(s)
- if *v == nil {
- *v = []string{}
- }
- return err
-}
-
-func (v *TagsFlag) Get() interface{} { return *v }
-
-func splitQuotedFields(s string) ([]string, error) {
- // Split fields allowing '' or "" around elements.
- // Quotes further inside the string do not count.
- var f []string
- for len(s) > 0 {
- for len(s) > 0 && isSpaceByte(s[0]) {
- s = s[1:]
- }
- if len(s) == 0 {
- break
- }
- // Accepted quoted string. No unescaping inside.
- if s[0] == '"' || s[0] == '\'' {
- quote := s[0]
- s = s[1:]
- i := 0
- for i < len(s) && s[i] != quote {
- i++
- }
- if i >= len(s) {
- return nil, fmt.Errorf("unterminated %c string", quote)
- }
- f = append(f, s[:i])
- s = s[i+1:]
- continue
- }
- i := 0
- for i < len(s) && !isSpaceByte(s[i]) {
- i++
- }
- f = append(f, s[:i])
- s = s[i:]
- }
- return f, nil
-}
-
-func (v *TagsFlag) String() string {
- return "<tagsFlag>"
-}
-
-func isSpaceByte(c byte) bool {
- return c == ' ' || c == '\t' || c == '\n' || c == '\r'
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/util.go
deleted file mode 100644
index 0e093fc0..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/buildutil/util.go
+++ /dev/null
@@ -1,167 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package buildutil
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "io/ioutil"
- "os"
- "path"
- "path/filepath"
- "runtime"
- "strings"
-)
-
-// ParseFile behaves like parser.ParseFile,
-// but uses the build context's file system interface, if any.
-//
-// If file is not absolute (as defined by IsAbsPath), the (dir, file)
-// components are joined using JoinPath; dir must be absolute.
-//
-// The displayPath function, if provided, is used to transform the
-// filename that will be attached to the ASTs.
-//
-// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
-//
-func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
- if !IsAbsPath(ctxt, file) {
- file = JoinPath(ctxt, dir, file)
- }
- rd, err := OpenFile(ctxt, file)
- if err != nil {
- return nil, err
- }
- defer rd.Close() // ignore error
- if displayPath != nil {
- file = displayPath(file)
- }
- return parser.ParseFile(fset, file, rd, mode)
-}
-
-// ContainingPackage returns the package containing filename.
-//
-// If filename is not absolute, it is interpreted relative to working directory dir.
-// All I/O is via the build context's file system interface, if any.
-//
-// The '...Files []string' fields of the resulting build.Package are not
-// populated (build.FindOnly mode).
-//
-// TODO(adonovan): call this from oracle when the tree thaws.
-//
-func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
- if !IsAbsPath(ctxt, filename) {
- filename = JoinPath(ctxt, dir, filename)
- }
-
- // We must not assume the file tree uses
- // "/" always,
- // `\` always,
- // or os.PathSeparator (which varies by platform),
- // but to make any progress, we are forced to assume that
- // paths will not use `\` unless the PathSeparator
- // is also `\`, thus we can rely on filepath.ToSlash for some sanity.
-
- dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"
-
- // We assume that no source root (GOPATH[i] or GOROOT) contains any other.
- for _, srcdir := range ctxt.SrcDirs() {
- srcdirSlash := filepath.ToSlash(srcdir) + "/"
- if dirHasPrefix(dirSlash, srcdirSlash) {
- importPath := dirSlash[len(srcdirSlash) : len(dirSlash)-len("/")]
- return ctxt.Import(importPath, dir, build.FindOnly)
- }
- }
-
- return nil, fmt.Errorf("can't find package containing %s", filename)
-}
-
-// dirHasPrefix tests whether the directory dir begins with prefix.
-func dirHasPrefix(dir, prefix string) bool {
- if runtime.GOOS != "windows" {
- return strings.HasPrefix(dir, prefix)
- }
- return len(dir) >= len(prefix) && strings.EqualFold(dir[:len(prefix)], prefix)
-}
-
-// -- Effective methods of file system interface -------------------------
-
-// (go/build.Context defines these as methods, but does not export them.)
-
-// TODO(adonovan): HasSubdir?
-
-// FileExists returns true if the specified file exists,
-// using the build context's file system interface.
-func FileExists(ctxt *build.Context, path string) bool {
- if ctxt.OpenFile != nil {
- r, err := ctxt.OpenFile(path)
- if err != nil {
- return false
- }
- r.Close() // ignore error
- return true
- }
- _, err := os.Stat(path)
- return err == nil
-}
-
-// OpenFile behaves like os.Open,
-// but uses the build context's file system interface, if any.
-func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
- if ctxt.OpenFile != nil {
- return ctxt.OpenFile(path)
- }
- return os.Open(path)
-}
-
-// IsAbsPath behaves like filepath.IsAbs,
-// but uses the build context's file system interface, if any.
-func IsAbsPath(ctxt *build.Context, path string) bool {
- if ctxt.IsAbsPath != nil {
- return ctxt.IsAbsPath(path)
- }
- return filepath.IsAbs(path)
-}
-
-// JoinPath behaves like filepath.Join,
-// but uses the build context's file system interface, if any.
-func JoinPath(ctxt *build.Context, path ...string) string {
- if ctxt.JoinPath != nil {
- return ctxt.JoinPath(path...)
- }
- return filepath.Join(path...)
-}
-
-// IsDir behaves like os.Stat plus IsDir,
-// but uses the build context's file system interface, if any.
-func IsDir(ctxt *build.Context, path string) bool {
- if ctxt.IsDir != nil {
- return ctxt.IsDir(path)
- }
- fi, err := os.Stat(path)
- return err == nil && fi.IsDir()
-}
-
-// ReadDir behaves like ioutil.ReadDir,
-// but uses the build context's file system interface, if any.
-func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
- if ctxt.ReadDir != nil {
- return ctxt.ReadDir(path)
- }
- return ioutil.ReadDir(path)
-}
-
-// SplitPathList behaves like filepath.SplitList,
-// but uses the build context's file system interface, if any.
-func SplitPathList(ctxt *build.Context, s string) []string {
- if ctxt.SplitPathList != nil {
- return ctxt.SplitPathList(s)
- }
- return filepath.SplitList(s)
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo.go
deleted file mode 100644
index 245b9149..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo.go
+++ /dev/null
@@ -1,209 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.5
-
-package loader
-
-// This file handles cgo preprocessing of files containing `import "C"`.
-//
-// DESIGN
-//
-// The approach taken is to run the cgo processor on the package's
-// CgoFiles and parse the output, faking the filenames of the
-// resulting ASTs so that the synthetic file containing the C types is
-// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
-// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
-// not the names of the actual temporary files.
-//
-// The advantage of this approach is its fidelity to 'go build'. The
-// downside is that the token.Position.Offset for each AST node is
-// incorrect, being an offset within the temporary file. Line numbers
-// should still be correct because of the //line comments.
-//
-// The logic of this file is mostly plundered from the 'go build'
-// tool, which also invokes the cgo preprocessor.
-//
-//
-// REJECTED ALTERNATIVE
-//
-// An alternative approach that we explored is to extend go/types'
-// Importer mechanism to provide the identity of the importing package
-// so that each time `import "C"` appears it resolves to a different
-// synthetic package containing just the objects needed in that case.
-// The loader would invoke cgo but parse only the cgo_types.go file
-// defining the package-level objects, discarding the other files
-// resulting from preprocessing.
-//
-// The benefit of this approach would have been that source-level
-// syntax information would correspond exactly to the original cgo
-// file, with no preprocessing involved, making source tools like
-// godoc, oracle, and eg happy. However, the approach was rejected
-// due to the additional complexity it would impose on go/types. (It
-// made for a beautiful demo, though.)
-//
-// cgo files, despite their *.go extension, are not legal Go source
-// files per the specification since they may refer to unexported
-// members of package "C" such as C.int. Also, a function such as
-// C.getpwent has in effect two types, one matching its C type and one
-// which additionally returns (errno C.int). The cgo preprocessor
-// uses name mangling to distinguish these two functions in the
-// processed code, but go/types would need to duplicate this logic in
-// its handling of function calls, analogous to the treatment of map
-// lookups in which y=m[k] and y,ok=m[k] are both legal.
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io/ioutil"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "strings"
-)
-
-// processCgoFiles invokes the cgo preprocessor on bp.CgoFiles, parses
-// the output and returns the resulting ASTs.
-//
-func processCgoFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
- tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
- if err != nil {
- return nil, err
- }
- defer os.RemoveAll(tmpdir)
-
- pkgdir := bp.Dir
- if DisplayPath != nil {
- pkgdir = DisplayPath(pkgdir)
- }
-
- cgoFiles, cgoDisplayFiles, err := runCgo(bp, pkgdir, tmpdir)
- if err != nil {
- return nil, err
- }
- var files []*ast.File
- for i := range cgoFiles {
- rd, err := os.Open(cgoFiles[i])
- if err != nil {
- return nil, err
- }
- display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
- f, err := parser.ParseFile(fset, display, rd, mode)
- rd.Close()
- if err != nil {
- return nil, err
- }
- files = append(files, f)
- }
- return files, nil
-}
-
-var cgoRe = regexp.MustCompile(`[/\\:]`)
-
-// runCgo invokes the cgo preprocessor on bp.CgoFiles and returns two
-// lists of files: the resulting processed files (in temporary
-// directory tmpdir) and the corresponding names of the unprocessed files.
-//
-// runCgo is adapted from (*builder).cgo in
-// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
-// Objective C, CGOPKGPATH, CGO_FLAGS.
-//
-func runCgo(bp *build.Package, pkgdir, tmpdir string) (files, displayFiles []string, err error) {
- cgoCPPFLAGS, _, _, _ := cflags(bp, true)
- _, cgoexeCFLAGS, _, _ := cflags(bp, false)
-
- if len(bp.CgoPkgConfig) > 0 {
- pcCFLAGS, err := pkgConfigFlags(bp)
- if err != nil {
- return nil, nil, err
- }
- cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
- }
-
- // Allows including _cgo_export.h from .[ch] files in the package.
- cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
-
- // _cgo_gotypes.go (displayed "C") contains the type definitions.
- files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
- displayFiles = append(displayFiles, "C")
- for _, fn := range bp.CgoFiles {
- // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
- f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
- files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
- displayFiles = append(displayFiles, fn)
- }
-
- var cgoflags []string
- if bp.Goroot && bp.ImportPath == "runtime/cgo" {
- cgoflags = append(cgoflags, "-import_runtime_cgo=false")
- }
- if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
- cgoflags = append(cgoflags, "-import_syscall=false")
- }
-
- args := stringList(
- "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
- cgoCPPFLAGS, cgoexeCFLAGS, bp.CgoFiles,
- )
- if false {
- log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
- }
- cmd := exec.Command(args[0], args[1:]...)
- cmd.Dir = pkgdir
- cmd.Stdout = os.Stderr
- cmd.Stderr = os.Stderr
- if err := cmd.Run(); err != nil {
- return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
- }
-
- return files, displayFiles, nil
-}
-
-// -- unmodified from 'go build' ---------------------------------------
-
-// Return the flags to use when invoking the C or C++ compilers, or cgo.
-func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
- var defaults string
- if def {
- defaults = "-g -O2"
- }
-
- cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
- cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
- cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
- ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
- return
-}
-
-// envList returns the value of the given environment variable broken
-// into fields, using the default value when the variable is empty.
-func envList(key, def string) []string {
- v := os.Getenv(key)
- if v == "" {
- v = def
- }
- return strings.Fields(v)
-}
-
-// stringList's arguments should be a sequence of string or []string values.
-// stringList flattens them into a single []string.
-func stringList(args ...interface{}) []string {
- var x []string
- for _, arg := range args {
- switch arg := arg.(type) {
- case []string:
- x = append(x, arg...)
- case string:
- x = append(x, arg)
- default:
- panic("stringList: invalid argument")
- }
- }
- return x
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go
deleted file mode 100644
index de57422d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-import (
- "errors"
- "fmt"
- "go/build"
- "os/exec"
- "strings"
-)
-
-// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
-func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
- cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
- out, err := cmd.CombinedOutput()
- if err != nil {
- s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
- if len(out) > 0 {
- s = fmt.Sprintf("%s: %s", s, out)
- }
- return nil, errors.New(s)
- }
- if len(out) > 0 {
- flags = strings.Fields(string(out))
- }
- return
-}
-
-// pkgConfigFlags calls pkg-config if needed and returns the cflags
-// needed to build the package.
-func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
- if len(p.CgoPkgConfig) == 0 {
- return nil, nil
- }
- return pkgConfig("--cflags", p.CgoPkgConfig)
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/doc.go
deleted file mode 100644
index 9b51c9ec..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/doc.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package loader loads a complete Go program from source code, parsing
-// and type-checking the initial packages plus their transitive closure
-// of dependencies. The ASTs and the derived facts are retained for
-// later use.
-//
-// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
-//
-// The package defines two primary types: Config, which specifies a
-// set of initial packages to load and various other options; and
-// Program, which is the result of successfully loading the packages
-// specified by a configuration.
-//
-// The configuration can be set directly, but *Config provides various
-// convenience methods to simplify the common cases, each of which can
-// be called any number of times. Finally, these are followed by a
-// call to Load() to actually load and type-check the program.
-//
-// var conf loader.Config
-//
-// // Use the command-line arguments to specify
-// // a set of initial packages to load from source.
-// // See FromArgsUsage for help.
-// rest, err := conf.FromArgs(os.Args[1:], wantTests)
-//
-// // Parse the specified files and create an ad hoc package with path "foo".
-// // All files must have the same 'package' declaration.
-// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
-//
-// // Create an ad hoc package with path "foo" from
-// // the specified already-parsed files.
-// // All ASTs must have the same 'package' declaration.
-// conf.CreateFromFiles("foo", parsedFiles)
-//
-// // Add "runtime" to the set of packages to be loaded.
-// conf.Import("runtime")
-//
-// // Adds "fmt" and "fmt_test" to the set of packages
-// // to be loaded. "fmt" will include *_test.go files.
-// conf.ImportWithTests("fmt")
-//
-// // Finally, load all the packages specified by the configuration.
-// prog, err := conf.Load()
-//
-// See examples_test.go for examples of API usage.
-//
-//
-// CONCEPTS AND TERMINOLOGY
-//
-// The WORKSPACE is the set of packages accessible to the loader. The
-// workspace is defined by Config.Build, a *build.Context. The
-// default context treats subdirectories of $GOROOT and $GOPATH as
-// packages, but this behavior may be overridden.
-//
-// An AD HOC package is one specified as a set of source files on the
-// command line. In the simplest case, it may consist of a single file
-// such as $GOROOT/src/net/http/triv.go.
-//
-// EXTERNAL TEST packages are those comprised of a set of *_test.go
-// files all with the same 'package foo_test' declaration, all in the
-// same directory. (go/build.Package calls these files XTestFiles.)
-//
-// An IMPORTABLE package is one that can be referred to by some import
-// spec. Every importable package is uniquely identified by its
-// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
-// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
-// typically denotes a subdirectory of the workspace.
-//
-// An import declaration uses an IMPORT PATH to refer to a package.
-// Most import declarations use the package path as the import path.
-//
-// Due to VENDORING (https://golang.org/s/go15vendor), the
-// interpretation of an import path may depend on the directory in which
-// it appears. To resolve an import path to a package path, go/build
-// must search the enclosing directories for a subdirectory named
-// "vendor".
-//
-// ad hoc packages and external test packages are NON-IMPORTABLE. The
-// path of an ad hoc package is inferred from the package
-// declarations of its files and is therefore not a unique package key.
-// For example, Config.CreatePkgs may specify two initial ad hoc
-// packages, both with path "main".
-//
-// An AUGMENTED package is an importable package P plus all the
-// *_test.go files with same 'package foo' declaration as P.
-// (go/build.Package calls these files TestFiles.)
-//
-// The INITIAL packages are those specified in the configuration. A
-// DEPENDENCY is a package loaded to satisfy an import in an initial
-// package or another dependency.
-//
-package loader
-
-// IMPLEMENTATION NOTES
-//
-// 'go test', in-package test files, and import cycles
-// ---------------------------------------------------
-//
-// An external test package may depend upon members of the augmented
-// package that are not in the unaugmented package, such as functions
-// that expose internals. (See bufio/export_test.go for an example.)
-// So, the loader must ensure that for each external test package
-// it loads, it also augments the corresponding non-test package.
-//
-// The import graph over n unaugmented packages must be acyclic; the
-// import graph over n-1 unaugmented packages plus one augmented
-// package must also be acyclic. ('go test' relies on this.) But the
-// import graph over n augmented packages may contain cycles.
-//
-// First, all the (unaugmented) non-test packages and their
-// dependencies are imported in the usual way; the loader reports an
-// error if it detects an import cycle.
-//
-// Then, each package P for which testing is desired is augmented by
-// the list P' of its in-package test files, by calling
-// (*types.Checker).Files. This arrangement ensures that P' may
-// reference definitions within P, but P may not reference definitions
-// within P'. Furthermore, P' may import any other package, including
-// ones that depend upon P, without an import cycle error.
-//
-// Consider two packages A and B, both of which have lists of
-// in-package test files we'll call A' and B', and which have the
-// following import graph edges:
-// B imports A
-// B' imports A
-// A' imports B
-// This last edge would be expected to create an error were it not
-// for the special type-checking discipline above.
-// Cycles of size greater than two are possible. For example:
-// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
-// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
-// regexp/exec_test.go (package regexp) imports "compress/bzip2"
-//
-//
-// Concurrency
-// -----------
-//
-// Let us define the import dependency graph as follows. Each node is a
-// list of files passed to (Checker).Files at once. Many of these lists
-// are the production code of an importable Go package, so those nodes
-// are labelled by the package's path. The remaining nodes are
-// ad hoc packages and lists of in-package *_test.go files that augment
-// an importable package; those nodes have no label.
-//
-// The edges of the graph represent import statements appearing within a
-// file. An edge connects a node (a list of files) to the node it
-// imports, which is importable and thus always labelled.
-//
-// Loading is controlled by this dependency graph.
-//
-// To reduce I/O latency, we start loading a package's dependencies
-// asynchronously as soon as we've parsed its files and enumerated its
-// imports (scanImports). This performs a preorder traversal of the
-// import dependency graph.
-//
-// To exploit hardware parallelism, we type-check unrelated packages in
-// parallel, where "unrelated" means not ordered by the partial order of
-// the import dependency graph.
-//
-// We use a concurrency-safe non-blocking cache (importer.imported) to
-// record the results of type-checking, whether success or failure. An
-// entry is created in this cache by startLoad the first time the
-// package is imported. The first goroutine to request an entry becomes
-// responsible for completing the task and broadcasting completion to
-// subsequent requestors, which block until then.
-//
-// Type checking occurs in (parallel) postorder: we cannot type-check a
-// set of files until we have loaded and type-checked all of their
-// immediate dependencies (and thus all of their transitive
-// dependencies). If the input were guaranteed free of import cycles,
-// this would be trivial: we could simply wait for completion of the
-// dependencies and then invoke the typechecker.
-//
-// But as we saw in the 'go test' section above, some cycles in the
-// import graph over packages are actually legal, so long as the
-// cycle-forming edge originates in the in-package test files that
-// augment the package. This explains why the nodes of the import
-// dependency graph are not packages, but lists of files: the unlabelled
-// nodes avoid the cycles. Consider packages A and B where B imports A
-// and A's in-package tests AT import B. The naively constructed import
-// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
-// the graph over lists of files is AT --> B --> A, where AT is an
-// unlabelled node.
-//
-// Awaiting completion of the dependencies in a cyclic graph would
-// deadlock, so we must materialize the import dependency graph (as
-// importer.graph) and check whether each import edge forms a cycle. If
-// x imports y, and the graph already contains a path from y to x, then
-// there is an import cycle, in which case the processing of x must not
-// wait for the completion of processing of y.
-//
-// When the type-checker makes a callback (doImport) to the loader for a
-// given import edge, there are two possible cases. In the normal case,
-// the dependency has already been completely type-checked; doImport
-// does a cache lookup and returns it. In the cyclic case, the entry in
-// the cache is still necessarily incomplete, indicating a cycle. We
-// perform the cycle check again to obtain the error message, and return
-// the error.
-//
-// The result of using concurrency is about a 2.5x speedup for stdlib_test.
-
-// TODO(adonovan): overhaul the package documentation.
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/go16.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/go16.go
deleted file mode 100644
index c0ed50f4..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/go16.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.6
-
-package loader
-
-import "go/build"
-
-func init() {
- ignoreVendor = build.IgnoreVendor
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/loader.go
deleted file mode 100644
index f0171fc9..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/loader.go
+++ /dev/null
@@ -1,1059 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.5
-
-package loader
-
-// See doc.go for package documentation and implementation notes.
-
-import (
- "errors"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "go/types"
- "os"
- "sort"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/tools/go/ast/astutil"
-)
-
-var ignoreVendor build.ImportMode
-
-const trace = false // show timing info for type-checking
-
-// Config specifies the configuration for loading a whole program from
-// Go source code.
-// The zero value for Config is a ready-to-use default configuration.
-type Config struct {
- // Fset is the file set for the parser to use when loading the
- // program. If nil, it may be lazily initialized by any
- // method of Config.
- Fset *token.FileSet
-
- // ParserMode specifies the mode to be used by the parser when
- // loading source packages.
- ParserMode parser.Mode
-
- // TypeChecker contains options relating to the type checker.
- //
- // The supplied IgnoreFuncBodies is not used; the effective
- // value comes from the TypeCheckFuncBodies func below.
- // The supplied Import function is not used either.
- TypeChecker types.Config
-
- // TypeCheckFuncBodies is a predicate over package paths.
- // A package for which the predicate is false will
- // have its package-level declarations type checked, but not
- // its function bodies; this can be used to quickly load
- // dependencies from source. If nil, all func bodies are type
- // checked.
- TypeCheckFuncBodies func(path string) bool
-
- // If Build is non-nil, it is used to locate source packages.
- // Otherwise &build.Default is used.
- //
- // By default, cgo is invoked to preprocess Go files that
- // import the fake package "C". This behaviour can be
- // disabled by setting CGO_ENABLED=0 in the environment prior
- // to startup, or by setting Build.CgoEnabled=false.
- Build *build.Context
-
- // The current directory, used for resolving relative package
- // references such as "./go/loader". If empty, os.Getwd will be
- // used instead.
- Cwd string
-
- // If DisplayPath is non-nil, it is used to transform each
- // file name obtained from Build.Import(). This can be used
- // to prevent a virtualized build.Config's file names from
- // leaking into the user interface.
- DisplayPath func(path string) string
-
- // If AllowErrors is true, Load will return a Program even
- // if some of the its packages contained I/O, parser or type
- // errors; such errors are accessible via PackageInfo.Errors. If
- // false, Load will fail if any package had an error.
- AllowErrors bool
-
- // CreatePkgs specifies a list of non-importable initial
- // packages to create. The resulting packages will appear in
- // the corresponding elements of the Program.Created slice.
- CreatePkgs []PkgSpec
-
- // ImportPkgs specifies a set of initial packages to load.
- // The map keys are package paths.
- //
- // The map value indicates whether to load tests. If true, Load
- // will add and type-check two lists of files to the package:
- // non-test files followed by in-package *_test.go files. In
- // addition, it will append the external test package (if any)
- // to Program.Created.
- ImportPkgs map[string]bool
-
- // FindPackage is called during Load to create the build.Package
- // for a given import path from a given directory.
- // If FindPackage is nil, (*build.Context).Import is used.
- // A client may use this hook to adapt to a proprietary build
- // system that does not follow the "go build" layout
- // conventions, for example.
- //
- // It must be safe to call concurrently from multiple goroutines.
- FindPackage func(ctxt *build.Context, fromDir, importPath string, mode build.ImportMode) (*build.Package, error)
-
- // AfterTypeCheck is called immediately after a list of files
- // has been type-checked and appended to info.Files.
- //
- // This optional hook function is the earliest opportunity for
- // the client to observe the output of the type checker,
- // which may be useful to reduce analysis latency when loading
- // a large program.
- //
- // The function is permitted to modify info.Info, for instance
- // to clear data structures that are no longer needed, which can
- // dramatically reduce peak memory consumption.
- //
- // The function may be called twice for the same PackageInfo:
- // once for the files of the package and again for the
- // in-package test files.
- //
- // It must be safe to call concurrently from multiple goroutines.
- AfterTypeCheck func(info *PackageInfo, files []*ast.File)
-}
-
-// A PkgSpec specifies a non-importable package to be created by Load.
-// Files are processed first, but typically only one of Files and
-// Filenames is provided. The path needn't be globally unique.
-//
-type PkgSpec struct {
- Path string // package path ("" => use package declaration)
- Files []*ast.File // ASTs of already-parsed files
- Filenames []string // names of files to be parsed
-}
-
-// A Program is a Go program loaded from source as specified by a Config.
-type Program struct {
- Fset *token.FileSet // the file set for this program
-
- // Created[i] contains the initial package whose ASTs or
- // filenames were supplied by Config.CreatePkgs[i], followed by
- // the external test package, if any, of each package in
- // Config.ImportPkgs ordered by ImportPath.
- //
- // NOTE: these files must not import "C". Cgo preprocessing is
- // only performed on imported packages, not ad hoc packages.
- //
- // TODO(adonovan): we need to copy and adapt the logic of
- // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
- // Config.Import and Config.Create methods return the same kind
- // of entity, essentially a build.Package.
- // Perhaps we can even reuse that type directly.
- Created []*PackageInfo
-
- // Imported contains the initially imported packages,
- // as specified by Config.ImportPkgs.
- Imported map[string]*PackageInfo
-
- // AllPackages contains the PackageInfo of every package
- // encountered by Load: all initial packages and all
- // dependencies, including incomplete ones.
- AllPackages map[*types.Package]*PackageInfo
-
- // importMap is the canonical mapping of package paths to
- // packages. It contains all Imported initial packages, but not
- // Created ones, and all imported dependencies.
- importMap map[string]*types.Package
-}
-
-// PackageInfo holds the ASTs and facts derived by the type-checker
-// for a single package.
-//
-// Not mutated once exposed via the API.
-//
-type PackageInfo struct {
- Pkg *types.Package
- Importable bool // true if 'import "Pkg.Path()"' would resolve to this
- TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
- Files []*ast.File // syntax trees for the package's files
- Errors []error // non-nil if the package had errors
- types.Info // type-checker deductions.
- dir string // package directory
-
- checker *types.Checker // transient type-checker state
- errorFunc func(error)
-}
-
-func (info *PackageInfo) String() string { return info.Pkg.Path() }
-
-func (info *PackageInfo) appendError(err error) {
- if info.errorFunc != nil {
- info.errorFunc(err)
- } else {
- fmt.Fprintln(os.Stderr, err)
- }
- info.Errors = append(info.Errors, err)
-}
-
-func (conf *Config) fset() *token.FileSet {
- if conf.Fset == nil {
- conf.Fset = token.NewFileSet()
- }
- return conf.Fset
-}
-
-// ParseFile is a convenience function (intended for testing) that invokes
-// the parser using the Config's FileSet, which is initialized if nil.
-//
-// src specifies the parser input as a string, []byte, or io.Reader, and
-// filename is its apparent name. If src is nil, the contents of
-// filename are read from the file system.
-//
-func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
- // TODO(adonovan): use conf.build() etc like parseFiles does.
- return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
-}
-
-// FromArgsUsage is a partial usage message that applications calling
-// FromArgs may wish to include in their -help output.
-const FromArgsUsage = `
-<args> is a list of arguments denoting a set of initial packages.
-It may take one of two forms:
-
-1. A list of *.go source files.
-
- All of the specified files are loaded, parsed and type-checked
- as a single package. All the files must belong to the same directory.
-
-2. A list of import paths, each denoting a package.
-
- The package's directory is found relative to the $GOROOT and
- $GOPATH using similar logic to 'go build', and the *.go files in
- that directory are loaded, parsed and type-checked as a single
- package.
-
- In addition, all *_test.go files in the directory are then loaded
- and parsed. Those files whose package declaration equals that of
- the non-*_test.go files are included in the primary package. Test
- files whose package declaration ends with "_test" are type-checked
- as another package, the 'external' test package, so that a single
- import path may denote two packages. (Whether this behaviour is
- enabled is tool-specific, and may depend on additional flags.)
-
-A '--' argument terminates the list of packages.
-`
-
-// FromArgs interprets args as a set of initial packages to load from
-// source and updates the configuration. It returns the list of
-// unconsumed arguments.
-//
-// It is intended for use in command-line interfaces that require a
-// set of initial packages to be specified; see FromArgsUsage message
-// for details.
-//
-// Only superficial errors are reported at this stage; errors dependent
-// on I/O are detected during Load.
-//
-func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
- var rest []string
- for i, arg := range args {
- if arg == "--" {
- rest = args[i+1:]
- args = args[:i]
- break // consume "--" and return the remaining args
- }
- }
-
- if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
- // Assume args is a list of a *.go files
- // denoting a single ad hoc package.
- for _, arg := range args {
- if !strings.HasSuffix(arg, ".go") {
- return nil, fmt.Errorf("named files must be .go files: %s", arg)
- }
- }
- conf.CreateFromFilenames("", args...)
- } else {
- // Assume args are directories each denoting a
- // package and (perhaps) an external test, iff xtest.
- for _, arg := range args {
- if xtest {
- conf.ImportWithTests(arg)
- } else {
- conf.Import(arg)
- }
- }
- }
-
- return rest, nil
-}
-
-// CreateFromFilenames is a convenience function that adds
-// a conf.CreatePkgs entry to create a package of the specified *.go
-// files.
-//
-func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
-}
-
-// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
-// entry to create package of the specified path and parsed files.
-//
-func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
-}
-
-// ImportWithTests is a convenience function that adds path to
-// ImportPkgs, the set of initial source packages located relative to
-// $GOPATH. The package will be augmented by any *_test.go files in
-// its directory that contain a "package x" (not "package x_test")
-// declaration.
-//
-// In addition, if any *_test.go files contain a "package x_test"
-// declaration, an additional package comprising just those files will
-// be added to CreatePkgs.
-//
-func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
-
-// Import is a convenience function that adds path to ImportPkgs, the
-// set of initial packages that will be imported from source.
-//
-func (conf *Config) Import(path string) { conf.addImport(path, false) }
-
-func (conf *Config) addImport(path string, tests bool) {
- if path == "C" {
- return // ignore; not a real package
- }
- if conf.ImportPkgs == nil {
- conf.ImportPkgs = make(map[string]bool)
- }
- conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
-}
-
-// PathEnclosingInterval returns the PackageInfo and ast.Node that
-// contain source interval [start, end), and all the node's ancestors
-// up to the AST root. It searches all ast.Files of all packages in prog.
-// exact is defined as for astutil.PathEnclosingInterval.
-//
-// The zero value is returned if not found.
-//
-func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
- for _, info := range prog.AllPackages {
- for _, f := range info.Files {
- if f.Pos() == token.NoPos {
- // This can happen if the parser saw
- // too many errors and bailed out.
- // (Use parser.AllErrors to prevent that.)
- continue
- }
- if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
- continue
- }
- if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
- return info, path, exact
- }
- }
- }
- return nil, nil, false
-}
-
-// InitialPackages returns a new slice containing the set of initial
-// packages (Created + Imported) in unspecified order.
-//
-func (prog *Program) InitialPackages() []*PackageInfo {
- infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
- infos = append(infos, prog.Created...)
- for _, info := range prog.Imported {
- infos = append(infos, info)
- }
- return infos
-}
-
-// Package returns the ASTs and results of type checking for the
-// specified package.
-func (prog *Program) Package(path string) *PackageInfo {
- if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
- return info
- }
- for _, info := range prog.Created {
- if path == info.Pkg.Path() {
- return info
- }
- }
- return nil
-}
-
-// ---------- Implementation ----------
-
-// importer holds the working state of the algorithm.
-type importer struct {
- conf *Config // the client configuration
- start time.Time // for logging
-
- progMu sync.Mutex // guards prog
- prog *Program // the resulting program
-
- // findpkg is a memoization of FindPackage.
- findpkgMu sync.Mutex // guards findpkg
- findpkg map[findpkgKey]*findpkgValue
-
- importedMu sync.Mutex // guards imported
- imported map[string]*importInfo // all imported packages (incl. failures) by import path
-
- // import dependency graph: graph[x][y] => x imports y
- //
- // Since non-importable packages cannot be cyclic, we ignore
- // their imports, thus we only need the subgraph over importable
- // packages. Nodes are identified by their import paths.
- graphMu sync.Mutex
- graph map[string]map[string]bool
-}
-
-type findpkgKey struct {
- importPath string
- fromDir string
- mode build.ImportMode
-}
-
-type findpkgValue struct {
- ready chan struct{} // closed to broadcast readiness
- bp *build.Package
- err error
-}
-
-// importInfo tracks the success or failure of a single import.
-//
-// Upon completion, exactly one of info and err is non-nil:
-// info on successful creation of a package, err otherwise.
-// A successful package may still contain type errors.
-//
-type importInfo struct {
- path string // import path
- info *PackageInfo // results of typechecking (including errors)
- complete chan struct{} // closed to broadcast that info is set.
-}
-
-// awaitCompletion blocks until ii is complete,
-// i.e. the info field is safe to inspect.
-func (ii *importInfo) awaitCompletion() {
- <-ii.complete // wait for close
-}
-
-// Complete marks ii as complete.
-// Its info and err fields will not be subsequently updated.
-func (ii *importInfo) Complete(info *PackageInfo) {
- if info == nil {
- panic("info == nil")
- }
- ii.info = info
- close(ii.complete)
-}
-
-type importError struct {
- path string // import path
- err error // reason for failure to create a package
-}
-
-// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
-// loading their dependencies packages as needed.
-//
-// On success, Load returns a Program containing a PackageInfo for
-// each package. On failure, it returns an error.
-//
-// If AllowErrors is true, Load will return a Program even if some
-// packages contained I/O, parser or type errors, or if dependencies
-// were missing. (Such errors are accessible via PackageInfo.Errors. If
-// false, Load will fail if any package had an error.
-//
-// It is an error if no packages were loaded.
-//
-func (conf *Config) Load() (*Program, error) {
- // Create a simple default error handler for parse/type errors.
- if conf.TypeChecker.Error == nil {
- conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
- }
-
- // Set default working directory for relative package references.
- if conf.Cwd == "" {
- var err error
- conf.Cwd, err = os.Getwd()
- if err != nil {
- return nil, err
- }
- }
-
- // Install default FindPackage hook using go/build logic.
- if conf.FindPackage == nil {
- conf.FindPackage = (*build.Context).Import
- }
-
- prog := &Program{
- Fset: conf.fset(),
- Imported: make(map[string]*PackageInfo),
- importMap: make(map[string]*types.Package),
- AllPackages: make(map[*types.Package]*PackageInfo),
- }
-
- imp := importer{
- conf: conf,
- prog: prog,
- findpkg: make(map[findpkgKey]*findpkgValue),
- imported: make(map[string]*importInfo),
- start: time.Now(),
- graph: make(map[string]map[string]bool),
- }
-
- // -- loading proper (concurrent phase) --------------------------------
-
- var errpkgs []string // packages that contained errors
-
- // Load the initially imported packages and their dependencies,
- // in parallel.
- // No vendor check on packages imported from the command line.
- infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
- for _, ie := range importErrors {
- conf.TypeChecker.Error(ie.err) // failed to create package
- errpkgs = append(errpkgs, ie.path)
- }
- for _, info := range infos {
- prog.Imported[info.Pkg.Path()] = info
- }
-
- // Augment the designated initial packages by their tests.
- // Dependencies are loaded in parallel.
- var xtestPkgs []*build.Package
- for importPath, augment := range conf.ImportPkgs {
- if !augment {
- continue
- }
-
- // No vendor check on packages imported from command line.
- bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
- if err != nil {
- // Package not found, or can't even parse package declaration.
- // Already reported by previous loop; ignore it.
- continue
- }
-
- // Needs external test package?
- if len(bp.XTestGoFiles) > 0 {
- xtestPkgs = append(xtestPkgs, bp)
- }
-
- // Consult the cache using the canonical package path.
- path := bp.ImportPath
- imp.importedMu.Lock() // (unnecessary, we're sequential here)
- ii, ok := imp.imported[path]
- // Paranoid checks added due to issue #11012.
- if !ok {
- // Unreachable.
- // The previous loop called importAll and thus
- // startLoad for each path in ImportPkgs, which
- // populates imp.imported[path] with a non-zero value.
- panic(fmt.Sprintf("imported[%q] not found", path))
- }
- if ii == nil {
- // Unreachable.
- // The ii values in this loop are the same as in
- // the previous loop, which enforced the invariant
- // that at least one of ii.err and ii.info is non-nil.
- panic(fmt.Sprintf("imported[%q] == nil", path))
- }
- if ii.info == nil {
- // Unreachable.
- // awaitCompletion has the postcondition
- // ii.info != nil.
- panic(fmt.Sprintf("imported[%q].info = nil", path))
- }
- info := ii.info
- imp.importedMu.Unlock()
-
- // Parse the in-package test files.
- files, errs := imp.conf.parsePackageFiles(bp, 't')
- for _, err := range errs {
- info.appendError(err)
- }
-
- // The test files augmenting package P cannot be imported,
- // but may import packages that import P,
- // so we must disable the cycle check.
- imp.addFiles(info, files, false)
- }
-
- createPkg := func(path string, files []*ast.File, errs []error) {
- // TODO(adonovan): fix: use dirname of files, not cwd.
- info := imp.newPackageInfo(path, conf.Cwd)
- for _, err := range errs {
- info.appendError(err)
- }
-
- // Ad hoc packages are non-importable,
- // so no cycle check is needed.
- // addFiles loads dependencies in parallel.
- imp.addFiles(info, files, false)
- prog.Created = append(prog.Created, info)
- }
-
- // Create packages specified by conf.CreatePkgs.
- for _, cp := range conf.CreatePkgs {
- files, errs := parseFiles(conf.fset(), conf.build(), nil, ".", cp.Filenames, conf.ParserMode)
- files = append(files, cp.Files...)
-
- path := cp.Path
- if path == "" {
- if len(files) > 0 {
- path = files[0].Name.Name
- } else {
- path = "(unnamed)"
- }
- }
- createPkg(path, files, errs)
- }
-
- // Create external test packages.
- sort.Sort(byImportPath(xtestPkgs))
- for _, bp := range xtestPkgs {
- files, errs := imp.conf.parsePackageFiles(bp, 'x')
- createPkg(bp.ImportPath+"_test", files, errs)
- }
-
- // -- finishing up (sequential) ----------------------------------------
-
- if len(prog.Imported)+len(prog.Created) == 0 {
- return nil, errors.New("no initial packages were loaded")
- }
-
- // Create infos for indirectly imported packages.
- // e.g. incomplete packages without syntax, loaded from export data.
- for _, obj := range prog.importMap {
- info := prog.AllPackages[obj]
- if info == nil {
- prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
- } else {
- // finished
- info.checker = nil
- info.errorFunc = nil
- }
- }
-
- if !conf.AllowErrors {
- // Report errors in indirectly imported packages.
- for _, info := range prog.AllPackages {
- if len(info.Errors) > 0 {
- errpkgs = append(errpkgs, info.Pkg.Path())
- }
- }
- if errpkgs != nil {
- var more string
- if len(errpkgs) > 3 {
- more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
- errpkgs = errpkgs[:3]
- }
- return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
- strings.Join(errpkgs, ", "), more)
- }
- }
-
- markErrorFreePackages(prog.AllPackages)
-
- return prog, nil
-}
-
-type byImportPath []*build.Package
-
-func (b byImportPath) Len() int { return len(b) }
-func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
-func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
-
-// markErrorFreePackages sets the TransitivelyErrorFree flag on all
-// applicable packages.
-func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
- // Build the transpose of the import graph.
- importedBy := make(map[*types.Package]map[*types.Package]bool)
- for P := range allPackages {
- for _, Q := range P.Imports() {
- clients, ok := importedBy[Q]
- if !ok {
- clients = make(map[*types.Package]bool)
- importedBy[Q] = clients
- }
- clients[P] = true
- }
- }
-
- // Find all packages reachable from some error package.
- reachable := make(map[*types.Package]bool)
- var visit func(*types.Package)
- visit = func(p *types.Package) {
- if !reachable[p] {
- reachable[p] = true
- for q := range importedBy[p] {
- visit(q)
- }
- }
- }
- for _, info := range allPackages {
- if len(info.Errors) > 0 {
- visit(info.Pkg)
- }
- }
-
- // Mark the others as "transitively error-free".
- for _, info := range allPackages {
- if !reachable[info.Pkg] {
- info.TransitivelyErrorFree = true
- }
- }
-}
-
-// build returns the effective build context.
-func (conf *Config) build() *build.Context {
- if conf.Build != nil {
- return conf.Build
- }
- return &build.Default
-}
-
-// parsePackageFiles enumerates the files belonging to package path,
-// then loads, parses and returns them, plus a list of I/O or parse
-// errors that were encountered.
-//
-// 'which' indicates which files to include:
-// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
-// 't': include in-package *_test.go source files (TestGoFiles)
-// 'x': include external *_test.go source files. (XTestGoFiles)
-//
-func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
- if bp.ImportPath == "unsafe" {
- return nil, nil
- }
- var filenames []string
- switch which {
- case 'g':
- filenames = bp.GoFiles
- case 't':
- filenames = bp.TestGoFiles
- case 'x':
- filenames = bp.XTestGoFiles
- default:
- panic(which)
- }
-
- files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
-
- // Preprocess CgoFiles and parse the outputs (sequentially).
- if which == 'g' && bp.CgoFiles != nil {
- cgofiles, err := processCgoFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
- if err != nil {
- errs = append(errs, err)
- } else {
- files = append(files, cgofiles...)
- }
- }
-
- return files, errs
-}
-
-// doImport imports the package denoted by path.
-// It implements the types.Importer signature.
-//
-// It returns an error if a package could not be created
-// (e.g. go/build or parse error), but type errors are reported via
-// the types.Config.Error callback (the first of which is also saved
-// in the package's PackageInfo).
-//
-// Idempotent.
-//
-func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
- if to == "C" {
- // This should be unreachable, but ad hoc packages are
- // not currently subject to cgo preprocessing.
- // See https://github.com/golang/go/issues/11627.
- return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
- from.Pkg.Path())
- }
-
- bp, err := imp.findPackage(to, from.dir, 0)
- if err != nil {
- return nil, err
- }
-
- // The standard unsafe package is handled specially,
- // and has no PackageInfo.
- if bp.ImportPath == "unsafe" {
- return types.Unsafe, nil
- }
-
- // Look for the package in the cache using its canonical path.
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii := imp.imported[path]
- imp.importedMu.Unlock()
- if ii == nil {
- panic("internal error: unexpected import: " + path)
- }
- if ii.info != nil {
- return ii.info.Pkg, nil
- }
-
- // Import of incomplete package: this indicates a cycle.
- fromPath := from.Pkg.Path()
- if cycle := imp.findPath(path, fromPath); cycle != nil {
- cycle = append([]string{fromPath}, cycle...)
- return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
- }
-
- panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
-}
-
-// findPackage locates the package denoted by the importPath in the
-// specified directory.
-func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
- // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
- // to avoid holding the lock around FindPackage.
- key := findpkgKey{importPath, fromDir, mode}
- imp.findpkgMu.Lock()
- v, ok := imp.findpkg[key]
- if ok {
- // cache hit
- imp.findpkgMu.Unlock()
-
- <-v.ready // wait for entry to become ready
- } else {
- // Cache miss: this goroutine becomes responsible for
- // populating the map entry and broadcasting its readiness.
- v = &findpkgValue{ready: make(chan struct{})}
- imp.findpkg[key] = v
- imp.findpkgMu.Unlock()
-
- ioLimit <- true
- v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
- <-ioLimit
-
- if _, ok := v.err.(*build.NoGoError); ok {
- v.err = nil // empty directory is not an error
- }
-
- close(v.ready) // broadcast ready condition
- }
- return v.bp, v.err
-}
-
-// importAll loads, parses, and type-checks the specified packages in
-// parallel and returns their completed importInfos in unspecified order.
-//
-// fromPath is the package path of the importing package, if it is
-// importable, "" otherwise. It is used for cycle detection.
-//
-// fromDir is the directory containing the import declaration that
-// caused these imports.
-//
-func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
- // TODO(adonovan): opt: do the loop in parallel once
- // findPackage is non-blocking.
- var pending []*importInfo
- for importPath := range imports {
- bp, err := imp.findPackage(importPath, fromDir, mode)
- if err != nil {
- errors = append(errors, importError{
- path: importPath,
- err: err,
- })
- continue
- }
- pending = append(pending, imp.startLoad(bp))
- }
-
- if fromPath != "" {
- // We're loading a set of imports.
- //
- // We must record graph edges from the importing package
- // to its dependencies, and check for cycles.
- imp.graphMu.Lock()
- deps, ok := imp.graph[fromPath]
- if !ok {
- deps = make(map[string]bool)
- imp.graph[fromPath] = deps
- }
- for _, ii := range pending {
- deps[ii.path] = true
- }
- imp.graphMu.Unlock()
- }
-
- for _, ii := range pending {
- if fromPath != "" {
- if cycle := imp.findPath(ii.path, fromPath); cycle != nil {
- // Cycle-forming import: we must not await its
- // completion since it would deadlock.
- //
- // We don't record the error in ii since
- // the error is really associated with the
- // cycle-forming edge, not the package itself.
- // (Also it would complicate the
- // invariants of importPath completion.)
- if trace {
- fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
- }
- continue
- }
- }
- ii.awaitCompletion()
- infos = append(infos, ii.info)
- }
-
- return infos, errors
-}
-
-// findPath returns an arbitrary path from 'from' to 'to' in the import
-// graph, or nil if there was none.
-func (imp *importer) findPath(from, to string) []string {
- imp.graphMu.Lock()
- defer imp.graphMu.Unlock()
-
- seen := make(map[string]bool)
- var search func(stack []string, importPath string) []string
- search = func(stack []string, importPath string) []string {
- if !seen[importPath] {
- seen[importPath] = true
- stack = append(stack, importPath)
- if importPath == to {
- return stack
- }
- for x := range imp.graph[importPath] {
- if p := search(stack, x); p != nil {
- return p
- }
- }
- }
- return nil
- }
- return search(make([]string, 0, 20), from)
-}
-
-// startLoad initiates the loading, parsing and type-checking of the
-// specified package and its dependencies, if it has not already begun.
-//
-// It returns an importInfo, not necessarily in a completed state. The
-// caller must call awaitCompletion() before accessing its info field.
-//
-// startLoad is concurrency-safe and idempotent.
-//
-func (imp *importer) startLoad(bp *build.Package) *importInfo {
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii, ok := imp.imported[path]
- if !ok {
- ii = &importInfo{path: path, complete: make(chan struct{})}
- imp.imported[path] = ii
- go func() {
- info := imp.load(bp)
- ii.Complete(info)
- }()
- }
- imp.importedMu.Unlock()
-
- return ii
-}
-
-// load implements package loading by parsing Go source files
-// located by go/build.
-func (imp *importer) load(bp *build.Package) *PackageInfo {
- info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
- info.Importable = true
- files, errs := imp.conf.parsePackageFiles(bp, 'g')
- for _, err := range errs {
- info.appendError(err)
- }
-
- imp.addFiles(info, files, true)
-
- imp.progMu.Lock()
- imp.prog.importMap[bp.ImportPath] = info.Pkg
- imp.progMu.Unlock()
-
- return info
-}
-
-// addFiles adds and type-checks the specified files to info, loading
-// their dependencies if needed. The order of files determines the
-// package initialization order. It may be called multiple times on the
-// same package. Errors are appended to the info.Errors field.
-//
-// cycleCheck determines whether the imports within files create
-// dependency edges that should be checked for potential cycles.
-//
-func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
- // Ensure the dependencies are loaded, in parallel.
- var fromPath string
- if cycleCheck {
- fromPath = info.Pkg.Path()
- }
- // TODO(adonovan): opt: make the caller do scanImports.
- // Callers with a build.Package can skip it.
- imp.importAll(fromPath, info.dir, scanImports(files), 0)
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
- time.Since(imp.start), info.Pkg.Path(), len(files))
- }
-
- // Ignore the returned (first) error since we
- // already collect them all in the PackageInfo.
- info.checker.Files(files)
- info.Files = append(info.Files, files...)
-
- if imp.conf.AfterTypeCheck != nil {
- imp.conf.AfterTypeCheck(info, files)
- }
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: stop %q\n",
- time.Since(imp.start), info.Pkg.Path())
- }
-}
-
-func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
- pkg := types.NewPackage(path, "")
- info := &PackageInfo{
- Pkg: pkg,
- Info: types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- },
- errorFunc: imp.conf.TypeChecker.Error,
- dir: dir,
- }
-
- // Copy the types.Config so we can vary it across PackageInfos.
- tc := imp.conf.TypeChecker
- tc.IgnoreFuncBodies = false
- if f := imp.conf.TypeCheckFuncBodies; f != nil {
- tc.IgnoreFuncBodies = !f(path)
- }
- tc.Importer = closure{imp, info}
- tc.Error = info.appendError // appendError wraps the user's Error function
-
- info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
- imp.progMu.Lock()
- imp.prog.AllPackages[pkg] = info
- imp.progMu.Unlock()
- return info
-}
-
-type closure struct {
- imp *importer
- info *PackageInfo
-}
-
-func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/util.go b/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/util.go
deleted file mode 100644
index 7f38dd74..00000000
--- a/vendor/github.com/aws/aws-sdk-go/awsmigrate/awsmigrate-renamer/vendor/golang.org/x/tools/go/loader/util.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-import (
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "os"
- "strconv"
- "sync"
-
- "golang.org/x/tools/go/buildutil"
-)
-
-// We use a counting semaphore to limit
-// the number of parallel I/O calls per process.
-var ioLimit = make(chan bool, 10)
-
-// parseFiles parses the Go source files within directory dir and
-// returns the ASTs of the ones that could be at least partially parsed,
-// along with a list of I/O and parse errors encountered.
-//
-// I/O is done via ctxt, which may specify a virtual file system.
-// displayPath is used to transform the filenames attached to the ASTs.
-//
-func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
- if displayPath == nil {
- displayPath = func(path string) string { return path }
- }
- var wg sync.WaitGroup
- n := len(files)
- parsed := make([]*ast.File, n)
- errors := make([]error, n)
- for i, file := range files {
- if !buildutil.IsAbsPath(ctxt, file) {
- file = buildutil.JoinPath(ctxt, dir, file)
- }
- wg.Add(1)
- go func(i int, file string) {
- ioLimit <- true // wait
- defer func() {
- wg.Done()
- <-ioLimit // signal
- }()
- var rd io.ReadCloser
- var err error
- if ctxt.OpenFile != nil {
- rd, err = ctxt.OpenFile(file)
- } else {
- rd, err = os.Open(file)
- }
- if err != nil {
- errors[i] = err // open failed
- return
- }
-
- // ParseFile may return both an AST and an error.
- parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
- rd.Close()
- }(i, file)
- }
- wg.Wait()
-
- // Eliminate nils, preserving order.
- var o int
- for _, f := range parsed {
- if f != nil {
- parsed[o] = f
- o++
- }
- }
- parsed = parsed[:o]
-
- o = 0
- for _, err := range errors {
- if err != nil {
- errors[o] = err
- o++
- }
- }
- errors = errors[:o]
-
- return parsed, errors
-}
-
-// scanImports returns the set of all import paths from all
-// import specs in the specified files.
-func scanImports(files []*ast.File) map[string]bool {
- imports := make(map[string]bool)
- for _, f := range files {
- for _, decl := range f.Decls {
- if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
- for _, spec := range decl.Specs {
- spec := spec.(*ast.ImportSpec)
-
- // NB: do not assume the program is well-formed!
- path, err := strconv.Unquote(spec.Path.Value)
- if err != nil {
- continue // quietly ignore the error
- }
- if path == "C" {
- continue // skip pseudopackage
- }
- imports[path] = true
- }
- }
- }
- }
- return imports
-}
-
-// ---------- Internal helpers ----------
-
-// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
- p := int(pos)
- base := f.Base()
- return base <= p && p < base+f.Size()
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.gitignore b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.gitignore
deleted file mode 100644
index c5203bf6..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-testdata/conf_out.ini
-ini.sublime-project
-ini.sublime-workspace
-testdata/conf_reflect.ini
-.idea
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.travis.yml b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.travis.yml
deleted file mode 100644
index 65c872ba..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/.travis.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-sudo: false
-language: go
-go:
- - 1.4.x
- - 1.5.x
- - 1.6.x
- - 1.7.x
- - master
-
-script:
- - go get golang.org/x/tools/cmd/cover
- - go get github.com/smartystreets/goconvey
- - go test -v -cover -race
-
-notifications:
- email:
- - u@gogs.io
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/LICENSE b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/LICENSE
deleted file mode 100644
index 37ec93a1..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/LICENSE
+++ /dev/null
@@ -1,191 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and
-distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright
-owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities
-that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, "control" means (i) the power, direct or
-indirect, to cause the direction or management of such entity, whether by
-contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
-outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising
-permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including
-but not limited to software source code, documentation source, and configuration
-files.
-
-"Object" form shall mean any form resulting from mechanical transformation or
-translation of a Source form, including but not limited to compiled object code,
-generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made
-available under the License, as indicated by a copyright notice that is included
-in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that
-is based on (or derived from) the Work and for which the editorial revisions,
-annotations, elaborations, or other modifications represent, as a whole, an
-original work of authorship. For the purposes of this License, Derivative Works
-shall not include works that remain separable from, or merely link (or bind by
-name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version
-of the Work and any modifications or additions to that Work or Derivative Works
-thereof, that is intentionally submitted to Licensor for inclusion in the Work
-by the copyright owner or by an individual or Legal Entity authorized to submit
-on behalf of the copyright owner. For the purposes of this definition,
-"submitted" means any form of electronic, verbal, or written communication sent
-to the Licensor or its representatives, including but not limited to
-communication on electronic mailing lists, source code control systems, and
-issue tracking systems that are managed by, or on behalf of, the Licensor for
-the purpose of discussing and improving the Work, but excluding communication
-that is conspicuously marked or otherwise designated in writing by the copyright
-owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
-of whom a Contribution has been received by Licensor and subsequently
-incorporated within the Work.
-
-2. Grant of Copyright License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable copyright license to reproduce, prepare Derivative Works of,
-publicly display, publicly perform, sublicense, and distribute the Work and such
-Derivative Works in Source or Object form.
-
-3. Grant of Patent License.
-
-Subject to the terms and conditions of this License, each Contributor hereby
-grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
-irrevocable (except as stated in this section) patent license to make, have
-made, use, offer to sell, sell, import, and otherwise transfer the Work, where
-such license applies only to those patent claims licensable by such Contributor
-that are necessarily infringed by their Contribution(s) alone or by combination
-of their Contribution(s) with the Work to which such Contribution(s) was
-submitted. If You institute patent litigation against any entity (including a
-cross-claim or counterclaim in a lawsuit) alleging that the Work or a
-Contribution incorporated within the Work constitutes direct or contributory
-patent infringement, then any patent licenses granted to You under this License
-for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution.
-
-You may reproduce and distribute copies of the Work or Derivative Works thereof
-in any medium, with or without modifications, and in Source or Object form,
-provided that You meet the following conditions:
-
-You must give any other recipients of the Work or Derivative Works a copy of
-this License; and
-You must cause any modified files to carry prominent notices stating that You
-changed the files; and
-You must retain, in the Source form of any Derivative Works that You distribute,
-all copyright, patent, trademark, and attribution notices from the Source form
-of the Work, excluding those notices that do not pertain to any part of the
-Derivative Works; and
-If the Work includes a "NOTICE" text file as part of its distribution, then any
-Derivative Works that You distribute must include a readable copy of the
-attribution notices contained within such NOTICE file, excluding those notices
-that do not pertain to any part of the Derivative Works, in at least one of the
-following places: within a NOTICE text file distributed as part of the
-Derivative Works; within the Source form or documentation, if provided along
-with the Derivative Works; or, within a display generated by the Derivative
-Works, if and wherever such third-party notices normally appear. The contents of
-the NOTICE file are for informational purposes only and do not modify the
-License. You may add Your own attribution notices within Derivative Works that
-You distribute, alongside or as an addendum to the NOTICE text from the Work,
-provided that such additional attribution notices cannot be construed as
-modifying the License.
-You may add Your own copyright statement to Your modifications and may provide
-additional or different license terms and conditions for use, reproduction, or
-distribution of Your modifications, or for any such Derivative Works as a whole,
-provided Your use, reproduction, and distribution of the Work otherwise complies
-with the conditions stated in this License.
-
-5. Submission of Contributions.
-
-Unless You explicitly state otherwise, any Contribution intentionally submitted
-for inclusion in the Work by You to the Licensor shall be under the terms and
-conditions of this License, without any additional terms or conditions.
-Notwithstanding the above, nothing herein shall supersede or modify the terms of
-any separate license agreement you may have executed with Licensor regarding
-such Contributions.
-
-6. Trademarks.
-
-This License does not grant permission to use the trade names, trademarks,
-service marks, or product names of the Licensor, except as required for
-reasonable and customary use in describing the origin of the Work and
-reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty.
-
-Unless required by applicable law or agreed to in writing, Licensor provides the
-Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
-including, without limitation, any warranties or conditions of TITLE,
-NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
-solely responsible for determining the appropriateness of using or
-redistributing the Work and assume any risks associated with Your exercise of
-permissions under this License.
-
-8. Limitation of Liability.
-
-In no event and under no legal theory, whether in tort (including negligence),
-contract, or otherwise, unless required by applicable law (such as deliberate
-and grossly negligent acts) or agreed to in writing, shall any Contributor be
-liable to You for damages, including any direct, indirect, special, incidental,
-or consequential damages of any character arising as a result of this License or
-out of the use or inability to use the Work (including but not limited to
-damages for loss of goodwill, work stoppage, computer failure or malfunction, or
-any and all other commercial damages or losses), even if such Contributor has
-been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability.
-
-While redistributing the Work or Derivative Works thereof, You may choose to
-offer, and charge a fee for, acceptance of support, warranty, indemnity, or
-other liability obligations and/or rights consistent with this License. However,
-in accepting such obligations, You may act only on Your own behalf and on Your
-sole responsibility, not on behalf of any other Contributor, and only if You
-agree to indemnify, defend, and hold each Contributor harmless for any liability
-incurred by, or claims asserted against, such Contributor by reason of your
-accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work
-
-To apply the Apache License to your work, attach the following boilerplate
-notice, with the fields enclosed by brackets "[]" replaced with your own
-identifying information. (Don't include the brackets!) The text should be
-enclosed in the appropriate comment syntax for the file format. We also
-recommend that a file or class name and description of purpose be included on
-the same "printed page" as the copyright notice for easier identification within
-third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/Makefile b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/Makefile
deleted file mode 100644
index ac034e52..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-.PHONY: build test bench vet
-
-build: vet bench
-
-test:
- go test -v -cover -race
-
-bench:
- go test -v -cover -race -test.bench=. -test.benchmem
-
-vet:
- go vet
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README.md b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README.md
deleted file mode 100644
index 85947422..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README.md
+++ /dev/null
@@ -1,740 +0,0 @@
-INI [![Build Status](https://travis-ci.org/go-ini/ini.svg?branch=master)](https://travis-ci.org/go-ini/ini) [![Sourcegraph](https://sourcegraph.com/github.com/go-ini/ini/-/badge.svg)](https://sourcegraph.com/github.com/go-ini/ini?badge)
-===
-
-![](https://avatars0.githubusercontent.com/u/10216035?v=3&s=200)
-
-Package ini provides INI file read and write functionality in Go.
-
-[简体中文](README_ZH.md)
-
-## Feature
-
-- Load multiple data sources(`[]byte`, file and `io.ReadCloser`) with overwrites.
-- Read with recursion values.
-- Read with parent-child sections.
-- Read with auto-increment key names.
-- Read with multiple-line values.
-- Read with tons of helper methods.
-- Read and convert values to Go types.
-- Read and **WRITE** comments of sections and keys.
-- Manipulate sections, keys and comments with ease.
-- Keep sections and keys in order as you parse and save.
-
-## Installation
-
-To use a tagged revision:
-
- go get gopkg.in/ini.v1
-
-To use with latest changes:
-
- go get github.com/go-ini/ini
-
-Please add `-u` flag to update in the future.
-
-### Testing
-
-If you want to test on your machine, please apply `-t` flag:
-
- go get -t gopkg.in/ini.v1
-
-Please add `-u` flag to update in the future.
-
-## Getting Started
-
-### Loading from data sources
-
-A **Data Source** is either raw data in type `[]byte`, a file name with type `string` or `io.ReadCloser`. You can load **as many data sources as you want**. Passing other types will simply return an error.
-
-```go
-cfg, err := ini.Load([]byte("raw data"), "filename", ioutil.NopCloser(bytes.NewReader([]byte("some other data"))))
-```
-
-Or start with an empty object:
-
-```go
-cfg := ini.Empty()
-```
-
-When you cannot decide how many data sources to load at the beginning, you will still be able to **Append()** them later.
-
-```go
-err := cfg.Append("other file", []byte("other raw data"))
-```
-
-If you have a list of files, some of which may not be available at the time, and you don't know exactly which ones, you can use `LooseLoad` to ignore nonexistent files without returning an error.
-
-```go
-cfg, err := ini.LooseLoad("filename", "filename_404")
-```
-
-The cool thing is, whenever the file is available to load while you're calling `Reload` method, it will be counted as usual.
-
-#### Ignore cases of key name
-
-When you do not care about cases of section and key names, you can use `InsensitiveLoad` to force all names to be lowercased while parsing.
-
-```go
-cfg, err := ini.InsensitiveLoad("filename")
-//...
-
-// sec1 and sec2 are the exactly same section object
-sec1, err := cfg.GetSection("Section")
-sec2, err := cfg.GetSection("SecTIOn")
-
-// key1 and key2 are the exactly same key object
-key1, err := cfg.GetKey("Key")
-key2, err := cfg.GetKey("KeY")
-```
-
-#### MySQL-like boolean key
-
-MySQL's configuration allows a key without value as follows:
-
-```ini
-[mysqld]
-...
-skip-host-cache
-skip-name-resolve
-```
-
-By default, this is considered as missing value. But if you know you're going to deal with those cases, you can assign advanced load options:
-
-```go
-cfg, err := LoadSources(LoadOptions{AllowBooleanKeys: true}, "my.cnf")
-```
-
-The value of those keys is always `true`, and when you save to a file, it will be kept in the same format as you read it.
-
-To generate such keys in your program, you could use `NewBooleanKey`:
-
-```go
-key, err := sec.NewBooleanKey("skip-host-cache")
-```
-
-#### Comment
-
-Take care that following format will be treated as comment:
-
-1. Line begins with `#` or `;`
-2. Words after `#` or `;`
-3. Words after section name (i.e. words after `[some section name]`)
-
-If you want to save a value with `#` or `;`, please quote them with ``` ` ``` or ``` """ ```.
-
-### Working with sections
-
-To get a section, you would need to:
-
-```go
-section, err := cfg.GetSection("section name")
-```
-
-For a shortcut for default section, just give an empty string as name:
-
-```go
-section, err := cfg.GetSection("")
-```
-
-When you're pretty sure the section exists, following code could make your life easier:
-
-```go
-section := cfg.Section("section name")
-```
-
-What happens when the section somehow does not exist? Don't panic, it automatically creates and returns a new section to you.
-
-To create a new section:
-
-```go
-err := cfg.NewSection("new section")
-```
-
-To get a list of sections or section names:
-
-```go
-sections := cfg.Sections()
-names := cfg.SectionStrings()
-```
-
-### Working with keys
-
-To get a key under a section:
-
-```go
-key, err := cfg.Section("").GetKey("key name")
-```
-
-Same rule applies to key operations:
-
-```go
-key := cfg.Section("").Key("key name")
-```
-
-To check if a key exists:
-
-```go
-yes := cfg.Section("").HasKey("key name")
-```
-
-To create a new key:
-
-```go
-err := cfg.Section("").NewKey("name", "value")
-```
-
-To get a list of keys or key names:
-
-```go
-keys := cfg.Section("").Keys()
-names := cfg.Section("").KeyStrings()
-```
-
-To get a clone hash of keys and corresponding values:
-
-```go
-hash := cfg.Section("").KeysHash()
-```
-
-### Working with values
-
-To get a string value:
-
-```go
-val := cfg.Section("").Key("key name").String()
-```
-
-To validate key value on the fly:
-
-```go
-val := cfg.Section("").Key("key name").Validate(func(in string) string {
- if len(in) == 0 {
- return "default"
- }
- return in
-})
-```
-
-If you do not want any auto-transformation (such as recursive read) for the values, you can get raw value directly (this way you get much better performance):
-
-```go
-val := cfg.Section("").Key("key name").Value()
-```
-
-To check if raw value exists:
-
-```go
-yes := cfg.Section("").HasValue("test value")
-```
-
-To get value with types:
-
-```go
-// For boolean values:
-// true when value is: 1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On
-// false when value is: 0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off
-v, err = cfg.Section("").Key("BOOL").Bool()
-v, err = cfg.Section("").Key("FLOAT64").Float64()
-v, err = cfg.Section("").Key("INT").Int()
-v, err = cfg.Section("").Key("INT64").Int64()
-v, err = cfg.Section("").Key("UINT").Uint()
-v, err = cfg.Section("").Key("UINT64").Uint64()
-v, err = cfg.Section("").Key("TIME").TimeFormat(time.RFC3339)
-v, err = cfg.Section("").Key("TIME").Time() // RFC3339
-
-v = cfg.Section("").Key("BOOL").MustBool()
-v = cfg.Section("").Key("FLOAT64").MustFloat64()
-v = cfg.Section("").Key("INT").MustInt()
-v = cfg.Section("").Key("INT64").MustInt64()
-v = cfg.Section("").Key("UINT").MustUint()
-v = cfg.Section("").Key("UINT64").MustUint64()
-v = cfg.Section("").Key("TIME").MustTimeFormat(time.RFC3339)
-v = cfg.Section("").Key("TIME").MustTime() // RFC3339
-
-// Methods start with Must also accept one argument for default value
-// when key not found or fail to parse value to given type.
-// Except method MustString, which you have to pass a default value.
-
-v = cfg.Section("").Key("String").MustString("default")
-v = cfg.Section("").Key("BOOL").MustBool(true)
-v = cfg.Section("").Key("FLOAT64").MustFloat64(1.25)
-v = cfg.Section("").Key("INT").MustInt(10)
-v = cfg.Section("").Key("INT64").MustInt64(99)
-v = cfg.Section("").Key("UINT").MustUint(3)
-v = cfg.Section("").Key("UINT64").MustUint64(6)
-v = cfg.Section("").Key("TIME").MustTimeFormat(time.RFC3339, time.Now())
-v = cfg.Section("").Key("TIME").MustTime(time.Now()) // RFC3339
-```
-
-What if my value is three-line long?
-
-```ini
-[advance]
-ADDRESS = """404 road,
-NotFound, State, 5000
-Earth"""
-```
-
-Not a problem!
-
-```go
-cfg.Section("advance").Key("ADDRESS").String()
-
-/* --- start ---
-404 road,
-NotFound, State, 5000
-Earth
------- end --- */
-```
-
-That's cool, how about continuation lines?
-
-```ini
-[advance]
-two_lines = how about \
- continuation lines?
-lots_of_lines = 1 \
- 2 \
- 3 \
- 4
-```
-
-Piece of cake!
-
-```go
-cfg.Section("advance").Key("two_lines").String() // how about continuation lines?
-cfg.Section("advance").Key("lots_of_lines").String() // 1 2 3 4
-```
-
-Well, I hate continuation lines, how do I disable that?
-
-```go
-cfg, err := ini.LoadSources(ini.LoadOptions{
- IgnoreContinuation: true,
-}, "filename")
-```
-
-Holy crap!
-
-Note that single quotes around values will be stripped:
-
-```ini
-foo = "some value" // foo: some value
-bar = 'some value' // bar: some value
-```
-
-That's all? Hmm, no.
-
-#### Helper methods of working with values
-
-To get value with given candidates:
-
-```go
-v = cfg.Section("").Key("STRING").In("default", []string{"str", "arr", "types"})
-v = cfg.Section("").Key("FLOAT64").InFloat64(1.1, []float64{1.25, 2.5, 3.75})
-v = cfg.Section("").Key("INT").InInt(5, []int{10, 20, 30})
-v = cfg.Section("").Key("INT64").InInt64(10, []int64{10, 20, 30})
-v = cfg.Section("").Key("UINT").InUint(4, []int{3, 6, 9})
-v = cfg.Section("").Key("UINT64").InUint64(8, []int64{3, 6, 9})
-v = cfg.Section("").Key("TIME").InTimeFormat(time.RFC3339, time.Now(), []time.Time{time1, time2, time3})
-v = cfg.Section("").Key("TIME").InTime(time.Now(), []time.Time{time1, time2, time3}) // RFC3339
-```
-
-The default value will be returned if the value of the key is not among the candidates you have given, and the default value does not need to be one of the candidates.
-
-To validate value in a given range:
-
-```go
-vals = cfg.Section("").Key("FLOAT64").RangeFloat64(0.0, 1.1, 2.2)
-vals = cfg.Section("").Key("INT").RangeInt(0, 10, 20)
-vals = cfg.Section("").Key("INT64").RangeInt64(0, 10, 20)
-vals = cfg.Section("").Key("UINT").RangeUint(0, 3, 9)
-vals = cfg.Section("").Key("UINT64").RangeUint64(0, 3, 9)
-vals = cfg.Section("").Key("TIME").RangeTimeFormat(time.RFC3339, time.Now(), minTime, maxTime)
-vals = cfg.Section("").Key("TIME").RangeTime(time.Now(), minTime, maxTime) // RFC3339
-```
-
-##### Auto-split values into a slice
-
-To use zero value of type for invalid inputs:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> [0.0 2.2 0.0 0.0]
-vals = cfg.Section("").Key("STRINGS").Strings(",")
-vals = cfg.Section("").Key("FLOAT64S").Float64s(",")
-vals = cfg.Section("").Key("INTS").Ints(",")
-vals = cfg.Section("").Key("INT64S").Int64s(",")
-vals = cfg.Section("").Key("UINTS").Uints(",")
-vals = cfg.Section("").Key("UINT64S").Uint64s(",")
-vals = cfg.Section("").Key("TIMES").Times(",")
-```
-
-To exclude invalid values out of result slice:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> [2.2]
-vals = cfg.Section("").Key("FLOAT64S").ValidFloat64s(",")
-vals = cfg.Section("").Key("INTS").ValidInts(",")
-vals = cfg.Section("").Key("INT64S").ValidInt64s(",")
-vals = cfg.Section("").Key("UINTS").ValidUints(",")
-vals = cfg.Section("").Key("UINT64S").ValidUint64s(",")
-vals = cfg.Section("").Key("TIMES").ValidTimes(",")
-```
-
-Or to return nothing but error when have invalid inputs:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> error
-vals = cfg.Section("").Key("FLOAT64S").StrictFloat64s(",")
-vals = cfg.Section("").Key("INTS").StrictInts(",")
-vals = cfg.Section("").Key("INT64S").StrictInt64s(",")
-vals = cfg.Section("").Key("UINTS").StrictUints(",")
-vals = cfg.Section("").Key("UINT64S").StrictUint64s(",")
-vals = cfg.Section("").Key("TIMES").StrictTimes(",")
-```
-
-### Save your configuration
-
-Finally, it's time to save your configuration to somewhere.
-
-A typical way to save configuration is writing it to a file:
-
-```go
-// ...
-err = cfg.SaveTo("my.ini")
-err = cfg.SaveToIndent("my.ini", "\t")
-```
-
-Another way to save is writing to a `io.Writer` interface:
-
-```go
-// ...
-cfg.WriteTo(writer)
-cfg.WriteToIndent(writer, "\t")
-```
-
-By default, spaces are used to align "=" sign between key and values, to disable that:
-
-```go
-ini.PrettyFormat = false
-```
-
-## Advanced Usage
-
-### Recursive Values
-
-For all key values, there is a special syntax `%(<name>)s`, where `<name>` is the key name in the same section or the default section, and `%(<name>)s` will be replaced by the corresponding value (an empty string if the key is not found). You can use this syntax with at most 99 levels of recursion.
-
-```ini
-NAME = ini
-
-[author]
-NAME = Unknwon
-GITHUB = https://github.com/%(NAME)s
-
-[package]
-FULL_NAME = github.com/go-ini/%(NAME)s
-```
-
-```go
-cfg.Section("author").Key("GITHUB").String() // https://github.com/Unknwon
-cfg.Section("package").Key("FULL_NAME").String() // github.com/go-ini/ini
-```
-
-### Parent-child Sections
-
-You can use `.` in section name to indicate parent-child relationship between two or more sections. If the key not found in the child section, library will try again on its parent section until there is no parent section.
-
-```ini
-NAME = ini
-VERSION = v1
-IMPORT_PATH = gopkg.in/%(NAME)s.%(VERSION)s
-
-[package]
-CLONE_URL = https://%(IMPORT_PATH)s
-
-[package.sub]
-```
-
-```go
-cfg.Section("package.sub").Key("CLONE_URL").String() // https://gopkg.in/ini.v1
-```
-
-#### Retrieve parent keys available to a child section
-
-```go
-cfg.Section("package.sub").ParentKeys() // ["CLONE_URL"]
-```
-
-### Unparseable Sections
-
-Sometimes, you have sections that do not contain key-value pairs but raw content, to handle such case, you can use `LoadOptions.UnparsableSections`:
-
-```go
-cfg, err := LoadSources(LoadOptions{UnparseableSections: []string{"COMMENTS"}}, `[COMMENTS]
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>`)
-
-body := cfg.Section("COMMENTS").Body()
-
-/* --- start ---
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>
------- end --- */
-```
-
-### Auto-increment Key Names
-
-If a key name is `-` in the data source, it is treated as special syntax for auto-increment key names starting from 1, and every section keeps its own independent counter.
-
-```ini
-[features]
--: Support read/write comments of keys and sections
--: Support auto-increment of key names
--: Support load multiple files to overwrite key values
-```
-
-```go
-cfg.Section("features").KeyStrings() // []{"#1", "#2", "#3"}
-```
-
-### Map To Struct
-
-Want more objective way to play with INI? Cool.
-
-```ini
-Name = Unknwon
-age = 21
-Male = true
-Born = 1993-01-01T20:17:05Z
-
-[Note]
-Content = Hi is a good man!
-Cities = HangZhou, Boston
-```
-
-```go
-type Note struct {
- Content string
- Cities []string
-}
-
-type Person struct {
- Name string
- Age int `ini:"age"`
- Male bool
- Born time.Time
- Note
- Created time.Time `ini:"-"`
-}
-
-func main() {
- cfg, err := ini.Load("path/to/ini")
- // ...
- p := new(Person)
- err = cfg.MapTo(p)
- // ...
-
- // Things can be simpler.
- err = ini.MapTo(p, "path/to/ini")
- // ...
-
- // Just map a section? Fine.
- n := new(Note)
- err = cfg.Section("Note").MapTo(n)
- // ...
-}
-```
-
-Can I have default value for field? Absolutely.
-
-Assign it before you map to struct. It will keep the value as it is if the key is not presented or got wrong type.
-
-```go
-// ...
-p := &Person{
- Name: "Joe",
-}
-// ...
-```
-
-It's really cool, but what's the point if you can't give me my file back from struct?
-
-### Reflect From Struct
-
-Why not?
-
-```go
-type Embeded struct {
- Dates []time.Time `delim:"|"`
- Places []string `ini:"places,omitempty"`
- None []int `ini:",omitempty"`
-}
-
-type Author struct {
- Name string `ini:"NAME"`
- Male bool
- Age int
- GPA float64
- NeverMind string `ini:"-"`
- *Embeded
-}
-
-func main() {
- a := &Author{"Unknwon", true, 21, 2.8, "",
- &Embeded{
- []time.Time{time.Now(), time.Now()},
- []string{"HangZhou", "Boston"},
- []int{},
- }}
- cfg := ini.Empty()
- err = ini.ReflectFrom(cfg, a)
- // ...
-}
-```
-
-So, what do I get?
-
-```ini
-NAME = Unknwon
-Male = true
-Age = 21
-GPA = 2.8
-
-[Embeded]
-Dates = 2015-08-07T22:14:22+08:00|2015-08-07T22:14:22+08:00
-places = HangZhou,Boston
-```
-
-#### Name Mapper
-
-To save your time and make your code cleaner, this library supports [`NameMapper`](https://gowalker.org/gopkg.in/ini.v1#NameMapper) between struct field and actual section and key name.
-
-There are 2 built-in name mappers:
-
-- `AllCapsUnderscore`: it converts to format `ALL_CAPS_UNDERSCORE` then match section or key.
-- `TitleUnderscore`: it converts to format `title_underscore` then match section or key.
-
-To use them:
-
-```go
-type Info struct {
- PackageName string
-}
-
-func main() {
- err = ini.MapToWithMapper(&Info{}, ini.TitleUnderscore, []byte("package_name=ini"))
- // ...
-
- cfg, err := ini.Load([]byte("PACKAGE_NAME=ini"))
- // ...
- info := new(Info)
- cfg.NameMapper = ini.AllCapsUnderscore
- err = cfg.MapTo(info)
- // ...
-}
-```
-
-Same rules of name mapper apply to `ini.ReflectFromWithMapper` function.
-
-#### Value Mapper
-
-To expand values (e.g. from environment variables), you can use the `ValueMapper` to transform values:
-
-```go
-type Env struct {
- Foo string `ini:"foo"`
-}
-
-func main() {
-	cfg, err := ini.Load([]byte("[env]\nfoo = ${MY_VAR}\n"))
- cfg.ValueMapper = os.ExpandEnv
- // ...
- env := &Env{}
- err = cfg.Section("env").MapTo(env)
-}
-```
-
-This would set the value of `env.Foo` to the value of the environment variable `MY_VAR`.
-
-#### Other Notes On Map/Reflect
-
-Any embedded struct is treated as a section by default, and there is no automatic parent-child relations in map/reflect feature:
-
-```go
-type Child struct {
- Age string
-}
-
-type Parent struct {
- Name string
- Child
-}
-
-type Config struct {
- City string
- Parent
-}
-```
-
-Example configuration:
-
-```ini
-City = Boston
-
-[Parent]
-Name = Unknwon
-
-[Child]
-Age = 21
-```
-
-What if, yes, I'm paranoid, I want embedded struct to be in the same section. Well, all roads lead to Rome.
-
-```go
-type Child struct {
- Age string
-}
-
-type Parent struct {
- Name string
- Child `ini:"Parent"`
-}
-
-type Config struct {
- City string
- Parent
-}
-```
-
-Example configuration:
-
-```ini
-City = Boston
-
-[Parent]
-Name = Unknwon
-Age = 21
-```
-
-## Getting Help
-
-- [API Documentation](https://gowalker.org/gopkg.in/ini.v1)
-- [File An Issue](https://github.com/go-ini/ini/issues/new)
-
-## FAQs
-
-### What does `BlockMode` field do?
-
-By default, library lets you read and write values so we need a locker to make sure your data is safe. But in cases that you are very sure about only reading data through the library, you can set `cfg.BlockMode = false` to speed up read operations about **50-70%** faster.
-
-### Why another INI library?
-
-Many people are using my another INI library [goconfig](https://github.com/Unknwon/goconfig), so the reason for this one is I would like to make more Go style code. Also when you set `cfg.BlockMode = false`, this one is about **10-30%** faster.
-
-To make those changes I have to confirm API broken, so it's safer to keep it in another place and start using `gopkg.in` to version my package at this time.(PS: shorter import path)
-
-## License
-
-This project is under Apache v2 License. See the [LICENSE](LICENSE) file for the full license text.
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README_ZH.md b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README_ZH.md
deleted file mode 100644
index 163432db..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/README_ZH.md
+++ /dev/null
@@ -1,727 +0,0 @@
-本包提供了 Go 语言中读写 INI 文件的功能。
-
-## 功能特性
-
-- 支持覆盖加载多个数据源(`[]byte`、文件和 `io.ReadCloser`)
-- 支持递归读取键值
-- 支持读取父子分区
-- 支持读取自增键名
-- 支持读取多行的键值
-- 支持大量辅助方法
-- 支持在读取时直接转换为 Go 语言类型
-- 支持读取和 **写入** 分区和键的注释
-- 轻松操作分区、键值和注释
-- 在保存文件时分区和键值会保持原有的顺序
-
-## 下载安装
-
-使用一个特定版本:
-
- go get gopkg.in/ini.v1
-
-使用最新版:
-
- go get github.com/go-ini/ini
-
-如需更新请添加 `-u` 选项。
-
-### 测试安装
-
-如果您想要在自己的机器上运行测试,请使用 `-t` 标记:
-
- go get -t gopkg.in/ini.v1
-
-如需更新请添加 `-u` 选项。
-
-## 开始使用
-
-### 从数据源加载
-
-一个 **数据源** 可以是 `[]byte` 类型的原始数据,`string` 类型的文件路径或 `io.ReadCloser`。您可以加载 **任意多个** 数据源。如果您传递其它类型的数据源,则会直接返回错误。
-
-```go
-cfg, err := ini.Load([]byte("raw data"), "filename", ioutil.NopCloser(bytes.NewReader([]byte("some other data"))))
-```
-
-或者从一个空白的文件开始:
-
-```go
-cfg := ini.Empty()
-```
-
-当您在一开始无法决定需要加载哪些数据源时,仍可以使用 **Append()** 在需要的时候加载它们。
-
-```go
-err := cfg.Append("other file", []byte("other raw data"))
-```
-
-当您想要加载一系列文件,但是不能够确定其中哪些文件是不存在的,可以通过调用函数 `LooseLoad` 来忽略它们(`Load` 会因为文件不存在而返回错误):
-
-```go
-cfg, err := ini.LooseLoad("filename", "filename_404")
-```
-
-更牛逼的是,当那些之前不存在的文件在重新调用 `Reload` 方法的时候突然出现了,那么它们会被正常加载。
-
-#### 忽略键名的大小写
-
-有时候分区和键的名称大小写混合非常烦人,这个时候就可以通过 `InsensitiveLoad` 将所有分区和键名在读取里强制转换为小写:
-
-```go
-cfg, err := ini.InsensitiveLoad("filename")
-//...
-
-// sec1 和 sec2 指向同一个分区对象
-sec1, err := cfg.GetSection("Section")
-sec2, err := cfg.GetSection("SecTIOn")
-
-// key1 和 key2 指向同一个键对象
-key1, err := cfg.GetKey("Key")
-key2, err := cfg.GetKey("KeY")
-```
-
-#### 类似 MySQL 配置中的布尔值键
-
-MySQL 的配置文件中会出现没有具体值的布尔类型的键:
-
-```ini
-[mysqld]
-...
-skip-host-cache
-skip-name-resolve
-```
-
-默认情况下这被认为是缺失值而无法完成解析,但可以通过高级的加载选项对它们进行处理:
-
-```go
-cfg, err := LoadSources(LoadOptions{AllowBooleanKeys: true}, "my.cnf")
-```
-
-这些键的值永远为 `true`,且在保存到文件时也只会输出键名。
-
-如果您想要通过程序来生成此类键,则可以使用 `NewBooleanKey`:
-
-```go
-key, err := sec.NewBooleanKey("skip-host-cache")
-```
-
-#### 关于注释
-
-下述几种情况的内容将被视为注释:
-
-1. 所有以 `#` 或 `;` 开头的行
-2. 所有在 `#` 或 `;` 之后的内容
-3. 分区标签后的文字 (即 `[分区名]` 之后的内容)
-
-如果你希望使用包含 `#` 或 `;` 的值,请使用 ``` ` ``` 或 ``` """ ``` 进行包覆。
-
-### 操作分区(Section)
-
-获取指定分区:
-
-```go
-section, err := cfg.GetSection("section name")
-```
-
-如果您想要获取默认分区,则可以用空字符串代替分区名:
-
-```go
-section, err := cfg.GetSection("")
-```
-
-当您非常确定某个分区是存在的,可以使用以下简便方法:
-
-```go
-section := cfg.Section("section name")
-```
-
-如果不小心判断错了,要获取的分区其实是不存在的,那会发生什么呢?没事的,它会自动创建并返回一个对应的分区对象给您。
-
-创建一个分区:
-
-```go
-err := cfg.NewSection("new section")
-```
-
-获取所有分区对象或名称:
-
-```go
-sections := cfg.Sections()
-names := cfg.SectionStrings()
-```
-
-### 操作键(Key)
-
-获取某个分区下的键:
-
-```go
-key, err := cfg.Section("").GetKey("key name")
-```
-
-和分区一样,您也可以直接获取键而忽略错误处理:
-
-```go
-key := cfg.Section("").Key("key name")
-```
-
-判断某个键是否存在:
-
-```go
-yes := cfg.Section("").HasKey("key name")
-```
-
-创建一个新的键:
-
-```go
-err := cfg.Section("").NewKey("name", "value")
-```
-
-获取分区下的所有键或键名:
-
-```go
-keys := cfg.Section("").Keys()
-names := cfg.Section("").KeyStrings()
-```
-
-获取分区下的所有键值对的克隆:
-
-```go
-hash := cfg.Section("").KeysHash()
-```
-
-### 操作键值(Value)
-
-获取一个类型为字符串(string)的值:
-
-```go
-val := cfg.Section("").Key("key name").String()
-```
-
-获取值的同时通过自定义函数进行处理验证:
-
-```go
-val := cfg.Section("").Key("key name").Validate(func(in string) string {
- if len(in) == 0 {
- return "default"
- }
- return in
-})
-```
-
-如果您不需要任何对值的自动转变功能(例如递归读取),可以直接获取原值(这种方式性能最佳):
-
-```go
-val := cfg.Section("").Key("key name").Value()
-```
-
-判断某个原值是否存在:
-
-```go
-yes := cfg.Section("").HasValue("test value")
-```
-
-获取其它类型的值:
-
-```go
-// 布尔值的规则:
-// true 当值为:1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On
-// false 当值为:0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off
-v, err = cfg.Section("").Key("BOOL").Bool()
-v, err = cfg.Section("").Key("FLOAT64").Float64()
-v, err = cfg.Section("").Key("INT").Int()
-v, err = cfg.Section("").Key("INT64").Int64()
-v, err = cfg.Section("").Key("UINT").Uint()
-v, err = cfg.Section("").Key("UINT64").Uint64()
-v, err = cfg.Section("").Key("TIME").TimeFormat(time.RFC3339)
-v, err = cfg.Section("").Key("TIME").Time() // RFC3339
-
-v = cfg.Section("").Key("BOOL").MustBool()
-v = cfg.Section("").Key("FLOAT64").MustFloat64()
-v = cfg.Section("").Key("INT").MustInt()
-v = cfg.Section("").Key("INT64").MustInt64()
-v = cfg.Section("").Key("UINT").MustUint()
-v = cfg.Section("").Key("UINT64").MustUint64()
-v = cfg.Section("").Key("TIME").MustTimeFormat(time.RFC3339)
-v = cfg.Section("").Key("TIME").MustTime() // RFC3339
-
-// 由 Must 开头的方法名允许接收一个相同类型的参数来作为默认值,
-// 当键不存在或者转换失败时,则会直接返回该默认值。
-// 但是,MustString 方法必须传递一个默认值。
-
-v = cfg.Section("").Key("String").MustString("default")
-v = cfg.Section("").Key("BOOL").MustBool(true)
-v = cfg.Section("").Key("FLOAT64").MustFloat64(1.25)
-v = cfg.Section("").Key("INT").MustInt(10)
-v = cfg.Section("").Key("INT64").MustInt64(99)
-v = cfg.Section("").Key("UINT").MustUint(3)
-v = cfg.Section("").Key("UINT64").MustUint64(6)
-v = cfg.Section("").Key("TIME").MustTimeFormat(time.RFC3339, time.Now())
-v = cfg.Section("").Key("TIME").MustTime(time.Now()) // RFC3339
-```
-
-如果我的值有好多行怎么办?
-
-```ini
-[advance]
-ADDRESS = """404 road,
-NotFound, State, 5000
-Earth"""
-```
-
-嗯哼?小 case!
-
-```go
-cfg.Section("advance").Key("ADDRESS").String()
-
-/* --- start ---
-404 road,
-NotFound, State, 5000
-Earth
------- end --- */
-```
-
-赞爆了!那要是我属于一行的内容写不下想要写到第二行怎么办?
-
-```ini
-[advance]
-two_lines = how about \
- continuation lines?
-lots_of_lines = 1 \
- 2 \
- 3 \
- 4
-```
-
-简直是小菜一碟!
-
-```go
-cfg.Section("advance").Key("two_lines").String() // how about continuation lines?
-cfg.Section("advance").Key("lots_of_lines").String() // 1 2 3 4
-```
-
-可是我有时候觉得两行连在一起特别没劲,怎么才能不自动连接两行呢?
-
-```go
-cfg, err := ini.LoadSources(ini.LoadOptions{
- IgnoreContinuation: true,
-}, "filename")
-```
-
-哇靠给力啊!
-
-需要注意的是,值两侧的单引号会被自动剔除:
-
-```ini
-foo = "some value" // foo: some value
-bar = 'some value' // bar: some value
-```
-
-这就是全部了?哈哈,当然不是。
-
-#### 操作键值的辅助方法
-
-获取键值时设定候选值:
-
-```go
-v = cfg.Section("").Key("STRING").In("default", []string{"str", "arr", "types"})
-v = cfg.Section("").Key("FLOAT64").InFloat64(1.1, []float64{1.25, 2.5, 3.75})
-v = cfg.Section("").Key("INT").InInt(5, []int{10, 20, 30})
-v = cfg.Section("").Key("INT64").InInt64(10, []int64{10, 20, 30})
-v = cfg.Section("").Key("UINT").InUint(4, []int{3, 6, 9})
-v = cfg.Section("").Key("UINT64").InUint64(8, []int64{3, 6, 9})
-v = cfg.Section("").Key("TIME").InTimeFormat(time.RFC3339, time.Now(), []time.Time{time1, time2, time3})
-v = cfg.Section("").Key("TIME").InTime(time.Now(), []time.Time{time1, time2, time3}) // RFC3339
-```
-
-如果获取到的值不是候选值的任意一个,则会返回默认值,而默认值不需要是候选值中的一员。
-
-验证获取的值是否在指定范围内:
-
-```go
-vals = cfg.Section("").Key("FLOAT64").RangeFloat64(0.0, 1.1, 2.2)
-vals = cfg.Section("").Key("INT").RangeInt(0, 10, 20)
-vals = cfg.Section("").Key("INT64").RangeInt64(0, 10, 20)
-vals = cfg.Section("").Key("UINT").RangeUint(0, 3, 9)
-vals = cfg.Section("").Key("UINT64").RangeUint64(0, 3, 9)
-vals = cfg.Section("").Key("TIME").RangeTimeFormat(time.RFC3339, time.Now(), minTime, maxTime)
-vals = cfg.Section("").Key("TIME").RangeTime(time.Now(), minTime, maxTime) // RFC3339
-```
-
-##### 自动分割键值到切片(slice)
-
-当存在无效输入时,使用零值代替:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> [0.0 2.2 0.0 0.0]
-vals = cfg.Section("").Key("STRINGS").Strings(",")
-vals = cfg.Section("").Key("FLOAT64S").Float64s(",")
-vals = cfg.Section("").Key("INTS").Ints(",")
-vals = cfg.Section("").Key("INT64S").Int64s(",")
-vals = cfg.Section("").Key("UINTS").Uints(",")
-vals = cfg.Section("").Key("UINT64S").Uint64s(",")
-vals = cfg.Section("").Key("TIMES").Times(",")
-```
-
-从结果切片中剔除无效输入:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> [2.2]
-vals = cfg.Section("").Key("FLOAT64S").ValidFloat64s(",")
-vals = cfg.Section("").Key("INTS").ValidInts(",")
-vals = cfg.Section("").Key("INT64S").ValidInt64s(",")
-vals = cfg.Section("").Key("UINTS").ValidUints(",")
-vals = cfg.Section("").Key("UINT64S").ValidUint64s(",")
-vals = cfg.Section("").Key("TIMES").ValidTimes(",")
-```
-
-当存在无效输入时,直接返回错误:
-
-```go
-// Input: 1.1, 2.2, 3.3, 4.4 -> [1.1 2.2 3.3 4.4]
-// Input: how, 2.2, are, you -> error
-vals = cfg.Section("").Key("FLOAT64S").StrictFloat64s(",")
-vals = cfg.Section("").Key("INTS").StrictInts(",")
-vals = cfg.Section("").Key("INT64S").StrictInt64s(",")
-vals = cfg.Section("").Key("UINTS").StrictUints(",")
-vals = cfg.Section("").Key("UINT64S").StrictUint64s(",")
-vals = cfg.Section("").Key("TIMES").StrictTimes(",")
-```
-
-### 保存配置
-
-终于到了这个时刻,是时候保存一下配置了。
-
-比较原始的做法是输出配置到某个文件:
-
-```go
-// ...
-err = cfg.SaveTo("my.ini")
-err = cfg.SaveToIndent("my.ini", "\t")
-```
-
-另一个比较高级的做法是写入到任何实现 `io.Writer` 接口的对象中:
-
-```go
-// ...
-cfg.WriteTo(writer)
-cfg.WriteToIndent(writer, "\t")
-```
-
-默认情况下,空格将被用于对齐键值之间的等号以美化输出结果,以下代码可以禁用该功能:
-
-```go
-ini.PrettyFormat = false
-```
-
-## 高级用法
-
-### 递归读取键值
-
-在获取所有键值的过程中,特殊语法 `%(<name>)s` 会被应用,其中 `<name>` 可以是相同分区或者默认分区下的键名。字符串 `%(<name>)s` 会被相应的键值所替代,如果指定的键不存在,则会用空字符串替代。您可以最多使用 99 层的递归嵌套。
-
-```ini
-NAME = ini
-
-[author]
-NAME = Unknwon
-GITHUB = https://github.com/%(NAME)s
-
-[package]
-FULL_NAME = github.com/go-ini/%(NAME)s
-```
-
-```go
-cfg.Section("author").Key("GITHUB").String() // https://github.com/Unknwon
-cfg.Section("package").Key("FULL_NAME").String() // github.com/go-ini/ini
-```
-
-### 读取父子分区
-
-您可以在分区名称中使用 `.` 来表示两个或多个分区之间的父子关系。如果某个键在子分区中不存在,则会去它的父分区中再次寻找,直到没有父分区为止。
-
-```ini
-NAME = ini
-VERSION = v1
-IMPORT_PATH = gopkg.in/%(NAME)s.%(VERSION)s
-
-[package]
-CLONE_URL = https://%(IMPORT_PATH)s
-
-[package.sub]
-```
-
-```go
-cfg.Section("package.sub").Key("CLONE_URL").String() // https://gopkg.in/ini.v1
-```
-
-#### 获取上级父分区下的所有键名
-
-```go
-cfg.Section("package.sub").ParentKeys() // ["CLONE_URL"]
-```
-
-### 无法解析的分区
-
-如果遇到一些比较特殊的分区,它们不包含常见的键值对,而是没有固定格式的纯文本,则可以使用 `LoadOptions.UnparsableSections` 进行处理:
-
-```go
-cfg, err := LoadSources(LoadOptions{UnparseableSections: []string{"COMMENTS"}}, `[COMMENTS]
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>`))
-
-body := cfg.Section("COMMENTS").Body()
-
-/* --- start ---
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>
------- end --- */
-```
-
-### 读取自增键名
-
-如果数据源中的键名为 `-`,则认为该键使用了自增键名的特殊语法。计数器从 1 开始,并且分区之间是相互独立的。
-
-```ini
-[features]
--: Support read/write comments of keys and sections
--: Support auto-increment of key names
--: Support load multiple files to overwrite key values
-```
-
-```go
-cfg.Section("features").KeyStrings() // []{"#1", "#2", "#3"}
-```
-
-### 映射到结构
-
-想要使用更加面向对象的方式玩转 INI 吗?好主意。
-
-```ini
-Name = Unknwon
-age = 21
-Male = true
-Born = 1993-01-01T20:17:05Z
-
-[Note]
-Content = Hi is a good man!
-Cities = HangZhou, Boston
-```
-
-```go
-type Note struct {
- Content string
- Cities []string
-}
-
-type Person struct {
- Name string
- Age int `ini:"age"`
- Male bool
- Born time.Time
- Note
- Created time.Time `ini:"-"`
-}
-
-func main() {
- cfg, err := ini.Load("path/to/ini")
- // ...
- p := new(Person)
- err = cfg.MapTo(p)
- // ...
-
- // 一切竟可以如此的简单。
- err = ini.MapTo(p, "path/to/ini")
- // ...
-
- // 嗯哼?只需要映射一个分区吗?
- n := new(Note)
- err = cfg.Section("Note").MapTo(n)
- // ...
-}
-```
-
-结构的字段怎么设置默认值呢?很简单,只要在映射之前对指定字段进行赋值就可以了。如果键未找到或者类型错误,该值不会发生改变。
-
-```go
-// ...
-p := &Person{
- Name: "Joe",
-}
-// ...
-```
-
-这样玩 INI 真的好酷啊!然而,如果不能还给我原来的配置文件,有什么卵用?
-
-### 从结构反射
-
-可是,我有说不能吗?
-
-```go
-type Embeded struct {
- Dates []time.Time `delim:"|"`
- Places []string `ini:"places,omitempty"`
- None []int `ini:",omitempty"`
-}
-
-type Author struct {
- Name string `ini:"NAME"`
- Male bool
- Age int
- GPA float64
- NeverMind string `ini:"-"`
- *Embeded
-}
-
-func main() {
- a := &Author{"Unknwon", true, 21, 2.8, "",
- &Embeded{
- []time.Time{time.Now(), time.Now()},
- []string{"HangZhou", "Boston"},
- []int{},
- }}
- cfg := ini.Empty()
- err = ini.ReflectFrom(cfg, a)
- // ...
-}
-```
-
-瞧瞧,奇迹发生了。
-
-```ini
-NAME = Unknwon
-Male = true
-Age = 21
-GPA = 2.8
-
-[Embeded]
-Dates = 2015-08-07T22:14:22+08:00|2015-08-07T22:14:22+08:00
-places = HangZhou,Boston
-```
-
-#### 名称映射器(Name Mapper)
-
-为了节省您的时间并简化代码,本库支持类型为 [`NameMapper`](https://gowalker.org/gopkg.in/ini.v1#NameMapper) 的名称映射器,该映射器负责结构字段名与分区名和键名之间的映射。
-
-目前有 2 款内置的映射器:
-
-- `AllCapsUnderscore`:该映射器将字段名转换至格式 `ALL_CAPS_UNDERSCORE` 后再去匹配分区名和键名。
-- `TitleUnderscore`:该映射器将字段名转换至格式 `title_underscore` 后再去匹配分区名和键名。
-
-使用方法:
-
-```go
-type Info struct{
- PackageName string
-}
-
-func main() {
- err = ini.MapToWithMapper(&Info{}, ini.TitleUnderscore, []byte("package_name=ini"))
- // ...
-
- cfg, err := ini.Load([]byte("PACKAGE_NAME=ini"))
- // ...
- info := new(Info)
- cfg.NameMapper = ini.AllCapsUnderscore
- err = cfg.MapTo(info)
- // ...
-}
-```
-
-使用函数 `ini.ReflectFromWithMapper` 时也可应用相同的规则。
-
-#### 值映射器(Value Mapper)
-
-值映射器允许使用一个自定义函数自动展开值的具体内容,例如:运行时获取环境变量:
-
-```go
-type Env struct {
- Foo string `ini:"foo"`
-}
-
-func main() {
- cfg, err := ini.Load([]byte("[env]\nfoo = ${MY_VAR}\n")
- cfg.ValueMapper = os.ExpandEnv
- // ...
- env := &Env{}
- err = cfg.Section("env").MapTo(env)
-}
-```
-
-本例中,`env.Foo` 将会是运行时所获取到环境变量 `MY_VAR` 的值。
-
-#### 映射/反射的其它说明
-
-任何嵌入的结构都会被默认认作一个不同的分区,并且不会自动产生所谓的父子分区关联:
-
-```go
-type Child struct {
- Age string
-}
-
-type Parent struct {
- Name string
- Child
-}
-
-type Config struct {
- City string
- Parent
-}
-```
-
-示例配置文件:
-
-```ini
-City = Boston
-
-[Parent]
-Name = Unknwon
-
-[Child]
-Age = 21
-```
-
-很好,但是,我就是要嵌入结构也在同一个分区。好吧,你爹是李刚!
-
-```go
-type Child struct {
- Age string
-}
-
-type Parent struct {
- Name string
- Child `ini:"Parent"`
-}
-
-type Config struct {
- City string
- Parent
-}
-```
-
-示例配置文件:
-
-```ini
-City = Boston
-
-[Parent]
-Name = Unknwon
-Age = 21
-```
-
-## 获取帮助
-
-- [API 文档](https://gowalker.org/gopkg.in/ini.v1)
-- [创建工单](https://github.com/go-ini/ini/issues/new)
-
-## 常见问题
-
-### 字段 `BlockMode` 是什么?
-
-默认情况下,本库会在您进行读写操作时采用锁机制来确保数据时间。但在某些情况下,您非常确定只进行读操作。此时,您可以通过设置 `cfg.BlockMode = false` 来将读操作提升大约 **50-70%** 的性能。
-
-### 为什么要写另一个 INI 解析库?
-
-许多人都在使用我的 [goconfig](https://github.com/Unknwon/goconfig) 来完成对 INI 文件的操作,但我希望使用更加 Go 风格的代码。并且当您设置 `cfg.BlockMode = false` 时,会有大约 **10-30%** 的性能提升。
-
-为了做出这些改变,我必须对 API 进行破坏,所以新开一个仓库是最安全的做法。除此之外,本库直接使用 `gopkg.in` 来进行版本化发布。(其实真相是导入路径更短了)
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/error.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/error.go
deleted file mode 100644
index 80afe743..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/error.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2016 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "fmt"
-)
-
-type ErrDelimiterNotFound struct {
- Line string
-}
-
-func IsErrDelimiterNotFound(err error) bool {
- _, ok := err.(ErrDelimiterNotFound)
- return ok
-}
-
-func (err ErrDelimiterNotFound) Error() string {
- return fmt.Sprintf("key-value delimiter not found: %s", err.Line)
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini.go
deleted file mode 100644
index 68d73aa7..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini.go
+++ /dev/null
@@ -1,549 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-// Package ini provides INI file read and write functionality in Go.
-package ini
-
-import (
- "bytes"
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "regexp"
- "runtime"
- "strconv"
- "strings"
- "sync"
- "time"
-)
-
-const (
- // Name for default section. You can use this constant or the string literal.
- // In most of cases, an empty string is all you need to access the section.
- DEFAULT_SECTION = "DEFAULT"
-
- // Maximum allowed depth when recursively substituing variable names.
- _DEPTH_VALUES = 99
- _VERSION = "1.25.4"
-)
-
-// Version returns current package version literal.
-func Version() string {
- return _VERSION
-}
-
-var (
- // Delimiter to determine or compose a new line.
- // This variable will be changed to "\r\n" automatically on Windows
- // at package init time.
- LineBreak = "\n"
-
- // Variable regexp pattern: %(variable)s
- varPattern = regexp.MustCompile(`%\(([^\)]+)\)s`)
-
- // Indicate whether to align "=" sign with spaces to produce pretty output
- // or reduce all possible spaces for compact format.
- PrettyFormat = true
-
- // Explicitly write DEFAULT section header
- DefaultHeader = false
-)
-
-func init() {
- if runtime.GOOS == "windows" {
- LineBreak = "\r\n"
- }
-}
-
-func inSlice(str string, s []string) bool {
- for _, v := range s {
- if str == v {
- return true
- }
- }
- return false
-}
-
-// dataSource is an interface that returns object which can be read and closed.
-type dataSource interface {
- ReadCloser() (io.ReadCloser, error)
-}
-
-// sourceFile represents an object that contains content on the local file system.
-type sourceFile struct {
- name string
-}
-
-func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) {
- return os.Open(s.name)
-}
-
-type bytesReadCloser struct {
- reader io.Reader
-}
-
-func (rc *bytesReadCloser) Read(p []byte) (n int, err error) {
- return rc.reader.Read(p)
-}
-
-func (rc *bytesReadCloser) Close() error {
- return nil
-}
-
-// sourceData represents an object that contains content in memory.
-type sourceData struct {
- data []byte
-}
-
-func (s *sourceData) ReadCloser() (io.ReadCloser, error) {
- return ioutil.NopCloser(bytes.NewReader(s.data)), nil
-}
-
-// sourceReadCloser represents an input stream with Close method.
-type sourceReadCloser struct {
- reader io.ReadCloser
-}
-
-func (s *sourceReadCloser) ReadCloser() (io.ReadCloser, error) {
- return s.reader, nil
-}
-
-// File represents a combination of a or more INI file(s) in memory.
-type File struct {
- // Should make things safe, but sometimes doesn't matter.
- BlockMode bool
- // Make sure data is safe in multiple goroutines.
- lock sync.RWMutex
-
- // Allow combination of multiple data sources.
- dataSources []dataSource
- // Actual data is stored here.
- sections map[string]*Section
-
- // To keep data in order.
- sectionList []string
-
- options LoadOptions
-
- NameMapper
- ValueMapper
-}
-
-// newFile initializes File object with given data sources.
-func newFile(dataSources []dataSource, opts LoadOptions) *File {
- return &File{
- BlockMode: true,
- dataSources: dataSources,
- sections: make(map[string]*Section),
- sectionList: make([]string, 0, 10),
- options: opts,
- }
-}
-
-func parseDataSource(source interface{}) (dataSource, error) {
- switch s := source.(type) {
- case string:
- return sourceFile{s}, nil
- case []byte:
- return &sourceData{s}, nil
- case io.ReadCloser:
- return &sourceReadCloser{s}, nil
- default:
- return nil, fmt.Errorf("error parsing data source: unknown type '%s'", s)
- }
-}
-
-type LoadOptions struct {
- // Loose indicates whether the parser should ignore nonexistent files or return error.
- Loose bool
- // Insensitive indicates whether the parser forces all section and key names to lowercase.
- Insensitive bool
- // IgnoreContinuation indicates whether to ignore continuation lines while parsing.
- IgnoreContinuation bool
- // AllowBooleanKeys indicates whether to allow boolean type keys or treat as value is missing.
- // This type of keys are mostly used in my.cnf.
- AllowBooleanKeys bool
- // AllowShadows indicates whether to keep track of keys with same name under same section.
- AllowShadows bool
- // Some INI formats allow group blocks that store a block of raw content that doesn't otherwise
- // conform to key/value pairs. Specify the names of those blocks here.
- UnparseableSections []string
-}
-
-func LoadSources(opts LoadOptions, source interface{}, others ...interface{}) (_ *File, err error) {
- sources := make([]dataSource, len(others)+1)
- sources[0], err = parseDataSource(source)
- if err != nil {
- return nil, err
- }
- for i := range others {
- sources[i+1], err = parseDataSource(others[i])
- if err != nil {
- return nil, err
- }
- }
- f := newFile(sources, opts)
- if err = f.Reload(); err != nil {
- return nil, err
- }
- return f, nil
-}
-
-// Load loads and parses from INI data sources.
-// Arguments can be mixed of file name with string type, or raw data in []byte.
-// It will return error if list contains nonexistent files.
-func Load(source interface{}, others ...interface{}) (*File, error) {
- return LoadSources(LoadOptions{}, source, others...)
-}
-
-// LooseLoad has exactly same functionality as Load function
-// except it ignores nonexistent files instead of returning error.
-func LooseLoad(source interface{}, others ...interface{}) (*File, error) {
- return LoadSources(LoadOptions{Loose: true}, source, others...)
-}
-
-// InsensitiveLoad has exactly same functionality as Load function
-// except it forces all section and key names to be lowercased.
-func InsensitiveLoad(source interface{}, others ...interface{}) (*File, error) {
- return LoadSources(LoadOptions{Insensitive: true}, source, others...)
-}
-
-// InsensitiveLoad has exactly same functionality as Load function
-// except it allows have shadow keys.
-func ShadowLoad(source interface{}, others ...interface{}) (*File, error) {
- return LoadSources(LoadOptions{AllowShadows: true}, source, others...)
-}
-
-// Empty returns an empty file object.
-func Empty() *File {
- // Ignore error here, we sure our data is good.
- f, _ := Load([]byte(""))
- return f
-}
-
-// NewSection creates a new section.
-func (f *File) NewSection(name string) (*Section, error) {
- if len(name) == 0 {
- return nil, errors.New("error creating new section: empty section name")
- } else if f.options.Insensitive && name != DEFAULT_SECTION {
- name = strings.ToLower(name)
- }
-
- if f.BlockMode {
- f.lock.Lock()
- defer f.lock.Unlock()
- }
-
- if inSlice(name, f.sectionList) {
- return f.sections[name], nil
- }
-
- f.sectionList = append(f.sectionList, name)
- f.sections[name] = newSection(f, name)
- return f.sections[name], nil
-}
-
-// NewRawSection creates a new section with an unparseable body.
-func (f *File) NewRawSection(name, body string) (*Section, error) {
- section, err := f.NewSection(name)
- if err != nil {
- return nil, err
- }
-
- section.isRawSection = true
- section.rawBody = body
- return section, nil
-}
-
-// NewSections creates a list of sections.
-func (f *File) NewSections(names ...string) (err error) {
- for _, name := range names {
- if _, err = f.NewSection(name); err != nil {
- return err
- }
- }
- return nil
-}
-
-// GetSection returns section by given name.
-func (f *File) GetSection(name string) (*Section, error) {
- if len(name) == 0 {
- name = DEFAULT_SECTION
- } else if f.options.Insensitive {
- name = strings.ToLower(name)
- }
-
- if f.BlockMode {
- f.lock.RLock()
- defer f.lock.RUnlock()
- }
-
- sec := f.sections[name]
- if sec == nil {
- return nil, fmt.Errorf("section '%s' does not exist", name)
- }
- return sec, nil
-}
-
-// Section assumes named section exists and returns a zero-value when not.
-func (f *File) Section(name string) *Section {
- sec, err := f.GetSection(name)
- if err != nil {
- // Note: It's OK here because the only possible error is empty section name,
- // but if it's empty, this piece of code won't be executed.
- sec, _ = f.NewSection(name)
- return sec
- }
- return sec
-}
-
-// Section returns list of Section.
-func (f *File) Sections() []*Section {
- sections := make([]*Section, len(f.sectionList))
- for i := range f.sectionList {
- sections[i] = f.Section(f.sectionList[i])
- }
- return sections
-}
-
-// SectionStrings returns list of section names.
-func (f *File) SectionStrings() []string {
- list := make([]string, len(f.sectionList))
- copy(list, f.sectionList)
- return list
-}
-
-// DeleteSection deletes a section.
-func (f *File) DeleteSection(name string) {
- if f.BlockMode {
- f.lock.Lock()
- defer f.lock.Unlock()
- }
-
- if len(name) == 0 {
- name = DEFAULT_SECTION
- }
-
- for i, s := range f.sectionList {
- if s == name {
- f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...)
- delete(f.sections, name)
- return
- }
- }
-}
-
-func (f *File) reload(s dataSource) error {
- r, err := s.ReadCloser()
- if err != nil {
- return err
- }
- defer r.Close()
-
- return f.parse(r)
-}
-
-// Reload reloads and parses all data sources.
-func (f *File) Reload() (err error) {
- for _, s := range f.dataSources {
- if err = f.reload(s); err != nil {
- // In loose mode, we create an empty default section for nonexistent files.
- if os.IsNotExist(err) && f.options.Loose {
- f.parse(bytes.NewBuffer(nil))
- continue
- }
- return err
- }
- }
- return nil
-}
-
-// Append appends one or more data sources and reloads automatically.
-func (f *File) Append(source interface{}, others ...interface{}) error {
- ds, err := parseDataSource(source)
- if err != nil {
- return err
- }
- f.dataSources = append(f.dataSources, ds)
- for _, s := range others {
- ds, err = parseDataSource(s)
- if err != nil {
- return err
- }
- f.dataSources = append(f.dataSources, ds)
- }
- return f.Reload()
-}
-
-// WriteToIndent writes content into io.Writer with given indention.
-// If PrettyFormat has been set to be true,
-// it will align "=" sign with spaces under each section.
-func (f *File) WriteToIndent(w io.Writer, indent string) (n int64, err error) {
- equalSign := "="
- if PrettyFormat {
- equalSign = " = "
- }
-
- // Use buffer to make sure target is safe until finish encoding.
- buf := bytes.NewBuffer(nil)
- for i, sname := range f.sectionList {
- sec := f.Section(sname)
- if len(sec.Comment) > 0 {
- if sec.Comment[0] != '#' && sec.Comment[0] != ';' {
- sec.Comment = "; " + sec.Comment
- }
- if _, err = buf.WriteString(sec.Comment + LineBreak); err != nil {
- return 0, err
- }
- }
-
- if i > 0 || DefaultHeader {
- if _, err = buf.WriteString("[" + sname + "]" + LineBreak); err != nil {
- return 0, err
- }
- } else {
- // Write nothing if default section is empty
- if len(sec.keyList) == 0 {
- continue
- }
- }
-
- if sec.isRawSection {
- if _, err = buf.WriteString(sec.rawBody); err != nil {
- return 0, err
- }
- continue
- }
-
- // Count and generate alignment length and buffer spaces using the
- // longest key. Keys may be modifed if they contain certain characters so
- // we need to take that into account in our calculation.
- alignLength := 0
- if PrettyFormat {
- for _, kname := range sec.keyList {
- keyLength := len(kname)
- // First case will surround key by ` and second by """
- if strings.ContainsAny(kname, "\"=:") {
- keyLength += 2
- } else if strings.Contains(kname, "`") {
- keyLength += 6
- }
-
- if keyLength > alignLength {
- alignLength = keyLength
- }
- }
- }
- alignSpaces := bytes.Repeat([]byte(" "), alignLength)
-
- KEY_LIST:
- for _, kname := range sec.keyList {
- key := sec.Key(kname)
- if len(key.Comment) > 0 {
- if len(indent) > 0 && sname != DEFAULT_SECTION {
- buf.WriteString(indent)
- }
- if key.Comment[0] != '#' && key.Comment[0] != ';' {
- key.Comment = "; " + key.Comment
- }
- if _, err = buf.WriteString(key.Comment + LineBreak); err != nil {
- return 0, err
- }
- }
-
- if len(indent) > 0 && sname != DEFAULT_SECTION {
- buf.WriteString(indent)
- }
-
- switch {
- case key.isAutoIncrement:
- kname = "-"
- case strings.ContainsAny(kname, "\"=:"):
- kname = "`" + kname + "`"
- case strings.Contains(kname, "`"):
- kname = `"""` + kname + `"""`
- }
-
- for _, val := range key.ValueWithShadows() {
- if _, err = buf.WriteString(kname); err != nil {
- return 0, err
- }
-
- if key.isBooleanType {
- if kname != sec.keyList[len(sec.keyList)-1] {
- buf.WriteString(LineBreak)
- }
- continue KEY_LIST
- }
-
- // Write out alignment spaces before "=" sign
- if PrettyFormat {
- buf.Write(alignSpaces[:alignLength-len(kname)])
- }
-
- // In case key value contains "\n", "`", "\"", "#" or ";"
- if strings.ContainsAny(val, "\n`") {
- val = `"""` + val + `"""`
- } else if strings.ContainsAny(val, "#;") {
- val = "`" + val + "`"
- }
- if _, err = buf.WriteString(equalSign + val + LineBreak); err != nil {
- return 0, err
- }
- }
- }
-
- // Put a line between sections
- if _, err = buf.WriteString(LineBreak); err != nil {
- return 0, err
- }
- }
-
- return buf.WriteTo(w)
-}
-
-// WriteTo writes file content into io.Writer.
-func (f *File) WriteTo(w io.Writer) (int64, error) {
- return f.WriteToIndent(w, "")
-}
-
-// SaveToIndent writes content to file system with given value indention.
-func (f *File) SaveToIndent(filename, indent string) error {
- // Note: Because we are truncating with os.Create,
- // so it's safer to save to a temporary file location and rename afte done.
- tmpPath := filename + "." + strconv.Itoa(time.Now().Nanosecond()) + ".tmp"
- defer os.Remove(tmpPath)
-
- fw, err := os.Create(tmpPath)
- if err != nil {
- return err
- }
-
- if _, err = f.WriteToIndent(fw, indent); err != nil {
- fw.Close()
- return err
- }
- fw.Close()
-
- // Remove old file and rename the new one.
- os.Remove(filename)
- return os.Rename(tmpPath, filename)
-}
-
-// SaveTo writes content to file system.
-func (f *File) SaveTo(filename string) error {
- return f.SaveToIndent(filename, "")
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini_test.go
deleted file mode 100644
index 00b1baa8..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/ini_test.go
+++ /dev/null
@@ -1,449 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "bytes"
- "io/ioutil"
- "strings"
- "testing"
- "time"
-
- . "github.com/smartystreets/goconvey/convey"
-)
-
-func Test_Version(t *testing.T) {
- Convey("Get version", t, func() {
- So(Version(), ShouldEqual, _VERSION)
- })
-}
-
-const _CONF_DATA = `
-; Package name
-NAME = ini
-; Package version
-VERSION = v1
-; Package import path
-IMPORT_PATH = gopkg.in/%(NAME)s.%(VERSION)s
-
-# Information about package author
-# Bio can be written in multiple lines.
-[author]
-NAME = Unknwon ; Succeeding comment
-E-MAIL = fake@localhost
-GITHUB = https://github.com/%(NAME)s
-BIO = """Gopher.
-Coding addict.
-Good man.
-""" # Succeeding comment
-
-[package]
-CLONE_URL = https://%(IMPORT_PATH)s
-
-[package.sub]
-UNUSED_KEY = should be deleted
-
-[features]
--: Support read/write comments of keys and sections
--: Support auto-increment of key names
--: Support load multiple files to overwrite key values
-
-[types]
-STRING = str
-BOOL = true
-BOOL_FALSE = false
-FLOAT64 = 1.25
-INT = 10
-TIME = 2015-01-01T20:17:05Z
-DURATION = 2h45m
-UINT = 3
-
-[array]
-STRINGS = en, zh, de
-FLOAT64S = 1.1, 2.2, 3.3
-INTS = 1, 2, 3
-UINTS = 1, 2, 3
-TIMES = 2015-01-01T20:17:05Z,2015-01-01T20:17:05Z,2015-01-01T20:17:05Z
-
-[note]
-empty_lines = next line is empty\
-
-; Comment before the section
-[comments] ; This is a comment for the section too
-; Comment before key
-key = "value"
-key2 = "value2" ; This is a comment for key2
-key3 = "one", "two", "three"
-
-[advance]
-value with quotes = "some value"
-value quote2 again = 'some value'
-includes comment sign = ` + "`" + "my#password" + "`" + `
-includes comment sign2 = ` + "`" + "my;password" + "`" + `
-true = 2+3=5
-"1+1=2" = true
-"""6+1=7""" = true
-"""` + "`" + `5+5` + "`" + `""" = 10
-` + "`" + `"6+6"` + "`" + ` = 12
-` + "`" + `7-2=4` + "`" + ` = false
-ADDRESS = ` + "`" + `404 road,
-NotFound, State, 50000` + "`" + `
-
-two_lines = how about \
- continuation lines?
-lots_of_lines = 1 \
- 2 \
- 3 \
- 4 \
-`
-
-func Test_Load(t *testing.T) {
- Convey("Load from data sources", t, func() {
-
- Convey("Load with empty data", func() {
- So(Empty(), ShouldNotBeNil)
- })
-
- Convey("Load with multiple data sources", func() {
- cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.ini", ioutil.NopCloser(bytes.NewReader([]byte(_CONF_DATA))))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- f, err := Load([]byte(_CONF_DATA), "testdata/404.ini")
- So(err, ShouldNotBeNil)
- So(f, ShouldBeNil)
- })
-
- Convey("Load with io.ReadCloser", func() {
- cfg, err := Load(ioutil.NopCloser(bytes.NewReader([]byte(_CONF_DATA))))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(cfg.Section("").Key("NAME").String(), ShouldEqual, "ini")
- })
- })
-
- Convey("Bad load process", t, func() {
-
- Convey("Load from invalid data sources", func() {
- _, err := Load(_CONF_DATA)
- So(err, ShouldNotBeNil)
-
- f, err := Load("testdata/404.ini")
- So(err, ShouldNotBeNil)
- So(f, ShouldBeNil)
-
- _, err = Load(1)
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte(""), 1)
- So(err, ShouldNotBeNil)
- })
-
- Convey("Load with bad section name", func() {
- _, err := Load([]byte("[]"))
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte("["))
- So(err, ShouldNotBeNil)
- })
-
- Convey("Load with bad keys", func() {
- _, err := Load([]byte(`"""name`))
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte(`"""name"""`))
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte(`""=1`))
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte(`=`))
- So(err, ShouldNotBeNil)
-
- _, err = Load([]byte(`name`))
- So(err, ShouldNotBeNil)
- })
-
- Convey("Load with bad values", func() {
- _, err := Load([]byte(`name="""Unknwon`))
- So(err, ShouldNotBeNil)
- })
- })
-
- Convey("Get section and key insensitively", t, func() {
- cfg, err := InsensitiveLoad([]byte(_CONF_DATA), "testdata/conf.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- sec, err := cfg.GetSection("Author")
- So(err, ShouldBeNil)
- So(sec, ShouldNotBeNil)
-
- key, err := sec.GetKey("E-mail")
- So(err, ShouldBeNil)
- So(key, ShouldNotBeNil)
- })
-
- Convey("Load with ignoring continuation lines", t, func() {
- cfg, err := LoadSources(LoadOptions{IgnoreContinuation: true}, []byte(`key1=a\b\
-key2=c\d\`))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(cfg.Section("").Key("key1").String(), ShouldEqual, `a\b\`)
- So(cfg.Section("").Key("key2").String(), ShouldEqual, `c\d\`)
- })
-
- Convey("Load with boolean type keys", t, func() {
- cfg, err := LoadSources(LoadOptions{AllowBooleanKeys: true}, []byte(`key1=hello
-key2
-#key3
-key4
-key5`))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(strings.Join(cfg.Section("").KeyStrings(), ","), ShouldEqual, "key1,key2,key4,key5")
- So(cfg.Section("").Key("key2").MustBool(false), ShouldBeTrue)
-
- var buf bytes.Buffer
- cfg.WriteTo(&buf)
- // there is always a trailing \n at the end of the section
- So(buf.String(), ShouldEqual, `key1 = hello
-key2
-#key3
-key4
-key5
-`)
- })
-}
-
-func Test_LooseLoad(t *testing.T) {
- Convey("Loose load from data sources", t, func() {
- Convey("Loose load mixed with nonexistent file", func() {
- cfg, err := LooseLoad("testdata/404.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
- var fake struct {
- Name string `ini:"name"`
- }
- So(cfg.MapTo(&fake), ShouldBeNil)
-
- cfg, err = LooseLoad([]byte("name=Unknwon"), "testdata/404.ini")
- So(err, ShouldBeNil)
- So(cfg.Section("").Key("name").String(), ShouldEqual, "Unknwon")
- So(cfg.MapTo(&fake), ShouldBeNil)
- So(fake.Name, ShouldEqual, "Unknwon")
- })
- })
-
-}
-
-func Test_File_Append(t *testing.T) {
- Convey("Append data sources", t, func() {
- cfg, err := Load([]byte(""))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(cfg.Append([]byte(""), []byte("")), ShouldBeNil)
-
- Convey("Append bad data sources", func() {
- So(cfg.Append(1), ShouldNotBeNil)
- So(cfg.Append([]byte(""), 1), ShouldNotBeNil)
- })
- })
-}
-
-func Test_File_WriteTo(t *testing.T) {
- Convey("Write to somewhere", t, func() {
- var buf bytes.Buffer
- cfg := Empty()
- cfg.WriteTo(&buf)
- })
-}
-
-func Test_File_SaveTo_WriteTo(t *testing.T) {
- Convey("Save file", t, func() {
- cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- cfg.Section("").Key("NAME").Comment = "Package name"
- cfg.Section("author").Comment = `Information about package author
-# Bio can be written in multiple lines.`
- cfg.Section("advanced").Key("val w/ pound").SetValue("my#password")
- cfg.Section("advanced").Key("longest key has a colon : yes/no").SetValue("yes")
- So(cfg.SaveTo("testdata/conf_out.ini"), ShouldBeNil)
-
- cfg.Section("author").Key("NAME").Comment = "This is author name"
-
- So(cfg.SaveToIndent("testdata/conf_out.ini", "\t"), ShouldBeNil)
-
- var buf bytes.Buffer
- _, err = cfg.WriteToIndent(&buf, "\t")
- So(err, ShouldBeNil)
- So(buf.String(), ShouldEqual, `; Package name
-NAME = ini
-; Package version
-VERSION = v1
-; Package import path
-IMPORT_PATH = gopkg.in/%(NAME)s.%(VERSION)s
-
-; Information about package author
-# Bio can be written in multiple lines.
-[author]
- ; This is author name
- NAME = Unknwon
- E-MAIL = u@gogs.io
- GITHUB = https://github.com/%(NAME)s
- # Succeeding comment
- BIO = """Gopher.
-Coding addict.
-Good man.
-"""
-
-[package]
- CLONE_URL = https://%(IMPORT_PATH)s
-
-[package.sub]
- UNUSED_KEY = should be deleted
-
-[features]
- - = Support read/write comments of keys and sections
- - = Support auto-increment of key names
- - = Support load multiple files to overwrite key values
-
-[types]
- STRING = str
- BOOL = true
- BOOL_FALSE = false
- FLOAT64 = 1.25
- INT = 10
- TIME = 2015-01-01T20:17:05Z
- DURATION = 2h45m
- UINT = 3
-
-[array]
- STRINGS = en, zh, de
- FLOAT64S = 1.1, 2.2, 3.3
- INTS = 1, 2, 3
- UINTS = 1, 2, 3
- TIMES = 2015-01-01T20:17:05Z,2015-01-01T20:17:05Z,2015-01-01T20:17:05Z
-
-[note]
- empty_lines = next line is empty
-
-; Comment before the section
-; This is a comment for the section too
-[comments]
- ; Comment before key
- key = value
- ; This is a comment for key2
- key2 = value2
- key3 = "one", "two", "three"
-
-[advance]
- value with quotes = some value
- value quote2 again = some value
- includes comment sign = `+"`"+"my#password"+"`"+`
- includes comment sign2 = `+"`"+"my;password"+"`"+`
- true = 2+3=5
- `+"`"+`1+1=2`+"`"+` = true
- `+"`"+`6+1=7`+"`"+` = true
- """`+"`"+`5+5`+"`"+`""" = 10
- `+"`"+`"6+6"`+"`"+` = 12
- `+"`"+`7-2=4`+"`"+` = false
- ADDRESS = """404 road,
-NotFound, State, 50000"""
- two_lines = how about continuation lines?
- lots_of_lines = 1 2 3 4
-
-[advanced]
- val w/ pound = `+"`"+`my#password`+"`"+`
- `+"`"+`longest key has a colon : yes/no`+"`"+` = yes
-
-`)
- })
-}
-
-func Test_File_WriteTo_SectionRaw(t *testing.T) {
- Convey("Write a INI with a raw section", t, func() {
- var buf bytes.Buffer
- cfg, err := LoadSources(
- LoadOptions{
- UnparseableSections: []string{"CORE_LESSON", "COMMENTS"},
- },
- "testdata/aicc.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
- cfg.WriteToIndent(&buf, "\t")
- So(buf.String(), ShouldEqual, `[Core]
- Lesson_Location = 87
- Lesson_Status = C
- Score = 3
- Time = 00:02:30
-
-[CORE_LESSON]
-my lesson state data – 1111111111111111111000000000000000001110000
-111111111111111111100000000000111000000000 – end my lesson state data
-[COMMENTS]
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>
-`)
- })
-}
-
-// Helpers for slice tests.
-func float64sEqual(values []float64, expected ...float64) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i], ShouldEqual, v)
- }
-}
-
-func intsEqual(values []int, expected ...int) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i], ShouldEqual, v)
- }
-}
-
-func int64sEqual(values []int64, expected ...int64) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i], ShouldEqual, v)
- }
-}
-
-func uintsEqual(values []uint, expected ...uint) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i], ShouldEqual, v)
- }
-}
-
-func uint64sEqual(values []uint64, expected ...uint64) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i], ShouldEqual, v)
- }
-}
-
-func timesEqual(values []time.Time, expected ...time.Time) {
- So(values, ShouldHaveLength, len(expected))
- for i, v := range expected {
- So(values[i].String(), ShouldEqual, v.String())
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key.go
deleted file mode 100644
index 852696f4..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key.go
+++ /dev/null
@@ -1,703 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "errors"
- "fmt"
- "strconv"
- "strings"
- "time"
-)
-
-// Key represents a key under a section.
-type Key struct {
- s *Section
- name string
- value string
- isAutoIncrement bool
- isBooleanType bool
-
- isShadow bool
- shadows []*Key
-
- Comment string
-}
-
-// newKey simply return a key object with given values.
-func newKey(s *Section, name, val string) *Key {
- return &Key{
- s: s,
- name: name,
- value: val,
- }
-}
-
-func (k *Key) addShadow(val string) error {
- if k.isShadow {
- return errors.New("cannot add shadow to another shadow key")
- } else if k.isAutoIncrement || k.isBooleanType {
- return errors.New("cannot add shadow to auto-increment or boolean key")
- }
-
- shadow := newKey(k.s, k.name, val)
- shadow.isShadow = true
- k.shadows = append(k.shadows, shadow)
- return nil
-}
-
-// AddShadow adds a new shadow key to itself.
-func (k *Key) AddShadow(val string) error {
- if !k.s.f.options.AllowShadows {
- return errors.New("shadow key is not allowed")
- }
- return k.addShadow(val)
-}
-
-// ValueMapper represents a mapping function for values, e.g. os.ExpandEnv
-type ValueMapper func(string) string
-
-// Name returns name of key.
-func (k *Key) Name() string {
- return k.name
-}
-
-// Value returns raw value of key for performance purpose.
-func (k *Key) Value() string {
- return k.value
-}
-
-// ValueWithShadows returns raw values of key and its shadows if any.
-func (k *Key) ValueWithShadows() []string {
- if len(k.shadows) == 0 {
- return []string{k.value}
- }
- vals := make([]string, len(k.shadows)+1)
- vals[0] = k.value
- for i := range k.shadows {
- vals[i+1] = k.shadows[i].value
- }
- return vals
-}
-
-// transformValue takes a raw value and transforms to its final string.
-func (k *Key) transformValue(val string) string {
- if k.s.f.ValueMapper != nil {
- val = k.s.f.ValueMapper(val)
- }
-
- // Fail-fast if no indicate char found for recursive value
- if !strings.Contains(val, "%") {
- return val
- }
- for i := 0; i < _DEPTH_VALUES; i++ {
- vr := varPattern.FindString(val)
- if len(vr) == 0 {
- break
- }
-
- // Take off leading '%(' and trailing ')s'.
- noption := strings.TrimLeft(vr, "%(")
- noption = strings.TrimRight(noption, ")s")
-
- // Search in the same section.
- nk, err := k.s.GetKey(noption)
- if err != nil {
- // Search again in default section.
- nk, _ = k.s.f.Section("").GetKey(noption)
- }
-
- // Substitute by new value and take off leading '%(' and trailing ')s'.
- val = strings.Replace(val, vr, nk.value, -1)
- }
- return val
-}
-
-// String returns string representation of value.
-func (k *Key) String() string {
- return k.transformValue(k.value)
-}
-
-// Validate accepts a validate function which can
-// return modifed result as key value.
-func (k *Key) Validate(fn func(string) string) string {
- return fn(k.String())
-}
-
-// parseBool returns the boolean value represented by the string.
-//
-// It accepts 1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On,
-// 0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off.
-// Any other value returns an error.
-func parseBool(str string) (value bool, err error) {
- switch str {
- case "1", "t", "T", "true", "TRUE", "True", "YES", "yes", "Yes", "y", "ON", "on", "On":
- return true, nil
- case "0", "f", "F", "false", "FALSE", "False", "NO", "no", "No", "n", "OFF", "off", "Off":
- return false, nil
- }
- return false, fmt.Errorf("parsing \"%s\": invalid syntax", str)
-}
-
-// Bool returns bool type value.
-func (k *Key) Bool() (bool, error) {
- return parseBool(k.String())
-}
-
-// Float64 returns float64 type value.
-func (k *Key) Float64() (float64, error) {
- return strconv.ParseFloat(k.String(), 64)
-}
-
-// Int returns int type value.
-func (k *Key) Int() (int, error) {
- return strconv.Atoi(k.String())
-}
-
-// Int64 returns int64 type value.
-func (k *Key) Int64() (int64, error) {
- return strconv.ParseInt(k.String(), 10, 64)
-}
-
-// Uint returns uint type valued.
-func (k *Key) Uint() (uint, error) {
- u, e := strconv.ParseUint(k.String(), 10, 64)
- return uint(u), e
-}
-
-// Uint64 returns uint64 type value.
-func (k *Key) Uint64() (uint64, error) {
- return strconv.ParseUint(k.String(), 10, 64)
-}
-
-// Duration returns time.Duration type value.
-func (k *Key) Duration() (time.Duration, error) {
- return time.ParseDuration(k.String())
-}
-
-// TimeFormat parses with given format and returns time.Time type value.
-func (k *Key) TimeFormat(format string) (time.Time, error) {
- return time.Parse(format, k.String())
-}
-
-// Time parses with RFC3339 format and returns time.Time type value.
-func (k *Key) Time() (time.Time, error) {
- return k.TimeFormat(time.RFC3339)
-}
-
-// MustString returns default value if key value is empty.
-func (k *Key) MustString(defaultVal string) string {
- val := k.String()
- if len(val) == 0 {
- k.value = defaultVal
- return defaultVal
- }
- return val
-}
-
-// MustBool always returns value without error,
-// it returns false if error occurs.
-func (k *Key) MustBool(defaultVal ...bool) bool {
- val, err := k.Bool()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatBool(defaultVal[0])
- return defaultVal[0]
- }
- return val
-}
-
-// MustFloat64 always returns value without error,
-// it returns 0.0 if error occurs.
-func (k *Key) MustFloat64(defaultVal ...float64) float64 {
- val, err := k.Float64()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatFloat(defaultVal[0], 'f', -1, 64)
- return defaultVal[0]
- }
- return val
-}
-
-// MustInt always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustInt(defaultVal ...int) int {
- val, err := k.Int()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatInt(int64(defaultVal[0]), 10)
- return defaultVal[0]
- }
- return val
-}
-
-// MustInt64 always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustInt64(defaultVal ...int64) int64 {
- val, err := k.Int64()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatInt(defaultVal[0], 10)
- return defaultVal[0]
- }
- return val
-}
-
-// MustUint always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustUint(defaultVal ...uint) uint {
- val, err := k.Uint()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatUint(uint64(defaultVal[0]), 10)
- return defaultVal[0]
- }
- return val
-}
-
-// MustUint64 always returns value without error,
-// it returns 0 if error occurs.
-func (k *Key) MustUint64(defaultVal ...uint64) uint64 {
- val, err := k.Uint64()
- if len(defaultVal) > 0 && err != nil {
- k.value = strconv.FormatUint(defaultVal[0], 10)
- return defaultVal[0]
- }
- return val
-}
-
-// MustDuration always returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustDuration(defaultVal ...time.Duration) time.Duration {
- val, err := k.Duration()
- if len(defaultVal) > 0 && err != nil {
- k.value = defaultVal[0].String()
- return defaultVal[0]
- }
- return val
-}
-
-// MustTimeFormat always parses with given format and returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustTimeFormat(format string, defaultVal ...time.Time) time.Time {
- val, err := k.TimeFormat(format)
- if len(defaultVal) > 0 && err != nil {
- k.value = defaultVal[0].Format(format)
- return defaultVal[0]
- }
- return val
-}
-
-// MustTime always parses with RFC3339 format and returns value without error,
-// it returns zero value if error occurs.
-func (k *Key) MustTime(defaultVal ...time.Time) time.Time {
- return k.MustTimeFormat(time.RFC3339, defaultVal...)
-}
-
-// In always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) In(defaultVal string, candidates []string) string {
- val := k.String()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InFloat64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InFloat64(defaultVal float64, candidates []float64) float64 {
- val := k.MustFloat64()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InInt always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InInt(defaultVal int, candidates []int) int {
- val := k.MustInt()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InInt64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InInt64(defaultVal int64, candidates []int64) int64 {
- val := k.MustInt64()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InUint always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InUint(defaultVal uint, candidates []uint) uint {
- val := k.MustUint()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InUint64 always returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InUint64(defaultVal uint64, candidates []uint64) uint64 {
- val := k.MustUint64()
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InTimeFormat always parses with given format and returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InTimeFormat(format string, defaultVal time.Time, candidates []time.Time) time.Time {
- val := k.MustTimeFormat(format)
- for _, cand := range candidates {
- if val == cand {
- return val
- }
- }
- return defaultVal
-}
-
-// InTime always parses with RFC3339 format and returns value without error,
-// it returns default value if error occurs or doesn't fit into candidates.
-func (k *Key) InTime(defaultVal time.Time, candidates []time.Time) time.Time {
- return k.InTimeFormat(time.RFC3339, defaultVal, candidates)
-}
-
-// RangeFloat64 checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeFloat64(defaultVal, min, max float64) float64 {
- val := k.MustFloat64()
- if val < min || val > max {
- return defaultVal
- }
- return val
-}
-
-// RangeInt checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeInt(defaultVal, min, max int) int {
- val := k.MustInt()
- if val < min || val > max {
- return defaultVal
- }
- return val
-}
-
-// RangeInt64 checks if value is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeInt64(defaultVal, min, max int64) int64 {
- val := k.MustInt64()
- if val < min || val > max {
- return defaultVal
- }
- return val
-}
-
-// RangeTimeFormat checks if value with given format is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeTimeFormat(format string, defaultVal, min, max time.Time) time.Time {
- val := k.MustTimeFormat(format)
- if val.Unix() < min.Unix() || val.Unix() > max.Unix() {
- return defaultVal
- }
- return val
-}
-
-// RangeTime checks if value with RFC3339 format is in given range inclusively,
-// and returns default value if it's not.
-func (k *Key) RangeTime(defaultVal, min, max time.Time) time.Time {
- return k.RangeTimeFormat(time.RFC3339, defaultVal, min, max)
-}
-
-// Strings returns list of string divided by given delimiter.
-func (k *Key) Strings(delim string) []string {
- str := k.String()
- if len(str) == 0 {
- return []string{}
- }
-
- vals := strings.Split(str, delim)
- for i := range vals {
- // vals[i] = k.transformValue(strings.TrimSpace(vals[i]))
- vals[i] = strings.TrimSpace(vals[i])
- }
- return vals
-}
-
-// StringsWithShadows returns list of string divided by given delimiter.
-// Shadows will also be appended if any.
-func (k *Key) StringsWithShadows(delim string) []string {
- vals := k.ValueWithShadows()
- results := make([]string, 0, len(vals)*2)
- for i := range vals {
- if len(vals) == 0 {
- continue
- }
-
- results = append(results, strings.Split(vals[i], delim)...)
- }
-
- for i := range results {
- results[i] = k.transformValue(strings.TrimSpace(results[i]))
- }
- return results
-}
-
-// Float64s returns list of float64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Float64s(delim string) []float64 {
- vals, _ := k.getFloat64s(delim, true, false)
- return vals
-}
-
-// Ints returns list of int divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Ints(delim string) []int {
- vals, _ := k.parseInts(k.Strings(delim), true, false)
- return vals
-}
-
-// Int64s returns list of int64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Int64s(delim string) []int64 {
- vals, _ := k.parseInt64s(k.Strings(delim), true, false)
- return vals
-}
-
-// Uints returns list of uint divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Uints(delim string) []uint {
- vals, _ := k.getUints(delim, true, false)
- return vals
-}
-
-// Uint64s returns list of uint64 divided by given delimiter. Any invalid input will be treated as zero value.
-func (k *Key) Uint64s(delim string) []uint64 {
- vals, _ := k.getUint64s(delim, true, false)
- return vals
-}
-
-// TimesFormat parses with given format and returns list of time.Time divided by given delimiter.
-// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC).
-func (k *Key) TimesFormat(format, delim string) []time.Time {
- vals, _ := k.getTimesFormat(format, delim, true, false)
- return vals
-}
-
-// Times parses with RFC3339 format and returns list of time.Time divided by given delimiter.
-// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC).
-func (k *Key) Times(delim string) []time.Time {
- return k.TimesFormat(time.RFC3339, delim)
-}
-
-// ValidFloat64s returns list of float64 divided by given delimiter. If some value is not float, then
-// it will not be included to result list.
-func (k *Key) ValidFloat64s(delim string) []float64 {
- vals, _ := k.getFloat64s(delim, false, false)
- return vals
-}
-
-// ValidInts returns list of int divided by given delimiter. If some value is not integer, then it will
-// not be included to result list.
-func (k *Key) ValidInts(delim string) []int {
- vals, _ := k.parseInts(k.Strings(delim), false, false)
- return vals
-}
-
-// ValidInt64s returns list of int64 divided by given delimiter. If some value is not 64-bit integer,
-// then it will not be included to result list.
-func (k *Key) ValidInt64s(delim string) []int64 {
- vals, _ := k.parseInt64s(k.Strings(delim), false, false)
- return vals
-}
-
-// ValidUints returns list of uint divided by given delimiter. If some value is not unsigned integer,
-// then it will not be included to result list.
-func (k *Key) ValidUints(delim string) []uint {
- vals, _ := k.getUints(delim, false, false)
- return vals
-}
-
-// ValidUint64s returns list of uint64 divided by given delimiter. If some value is not 64-bit unsigned
-// integer, then it will not be included to result list.
-func (k *Key) ValidUint64s(delim string) []uint64 {
- vals, _ := k.getUint64s(delim, false, false)
- return vals
-}
-
-// ValidTimesFormat parses with given format and returns list of time.Time divided by given delimiter.
-func (k *Key) ValidTimesFormat(format, delim string) []time.Time {
- vals, _ := k.getTimesFormat(format, delim, false, false)
- return vals
-}
-
-// ValidTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter.
-func (k *Key) ValidTimes(delim string) []time.Time {
- return k.ValidTimesFormat(time.RFC3339, delim)
-}
-
-// StrictFloat64s returns list of float64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictFloat64s(delim string) ([]float64, error) {
- return k.getFloat64s(delim, false, true)
-}
-
-// StrictInts returns list of int divided by given delimiter or error on first invalid input.
-func (k *Key) StrictInts(delim string) ([]int, error) {
- return k.parseInts(k.Strings(delim), false, true)
-}
-
-// StrictInt64s returns list of int64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictInt64s(delim string) ([]int64, error) {
- return k.parseInt64s(k.Strings(delim), false, true)
-}
-
-// StrictUints returns list of uint divided by given delimiter or error on first invalid input.
-func (k *Key) StrictUints(delim string) ([]uint, error) {
- return k.getUints(delim, false, true)
-}
-
-// StrictUint64s returns list of uint64 divided by given delimiter or error on first invalid input.
-func (k *Key) StrictUint64s(delim string) ([]uint64, error) {
- return k.getUint64s(delim, false, true)
-}
-
-// StrictTimesFormat parses with given format and returns list of time.Time divided by given delimiter
-// or error on first invalid input.
-func (k *Key) StrictTimesFormat(format, delim string) ([]time.Time, error) {
- return k.getTimesFormat(format, delim, false, true)
-}
-
-// StrictTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter
-// or error on first invalid input.
-func (k *Key) StrictTimes(delim string) ([]time.Time, error) {
- return k.StrictTimesFormat(time.RFC3339, delim)
-}
-
-// getFloat64s returns list of float64 divided by given delimiter.
-func (k *Key) getFloat64s(delim string, addInvalid, returnOnInvalid bool) ([]float64, error) {
- strs := k.Strings(delim)
- vals := make([]float64, 0, len(strs))
- for _, str := range strs {
- val, err := strconv.ParseFloat(str, 64)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, val)
- }
- }
- return vals, nil
-}
-
-// parseInts transforms strings to ints.
-func (k *Key) parseInts(strs []string, addInvalid, returnOnInvalid bool) ([]int, error) {
- vals := make([]int, 0, len(strs))
- for _, str := range strs {
- val, err := strconv.Atoi(str)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, val)
- }
- }
- return vals, nil
-}
-
-// parseInt64s transforms strings to int64s.
-func (k *Key) parseInt64s(strs []string, addInvalid, returnOnInvalid bool) ([]int64, error) {
- vals := make([]int64, 0, len(strs))
- for _, str := range strs {
- val, err := strconv.ParseInt(str, 10, 64)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, val)
- }
- }
- return vals, nil
-}
-
-// getUints returns list of uint divided by given delimiter.
-func (k *Key) getUints(delim string, addInvalid, returnOnInvalid bool) ([]uint, error) {
- strs := k.Strings(delim)
- vals := make([]uint, 0, len(strs))
- for _, str := range strs {
- val, err := strconv.ParseUint(str, 10, 0)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, uint(val))
- }
- }
- return vals, nil
-}
-
-// getUint64s returns list of uint64 divided by given delimiter.
-func (k *Key) getUint64s(delim string, addInvalid, returnOnInvalid bool) ([]uint64, error) {
- strs := k.Strings(delim)
- vals := make([]uint64, 0, len(strs))
- for _, str := range strs {
- val, err := strconv.ParseUint(str, 10, 64)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, val)
- }
- }
- return vals, nil
-}
-
-// getTimesFormat parses with given format and returns list of time.Time divided by given delimiter.
-func (k *Key) getTimesFormat(format, delim string, addInvalid, returnOnInvalid bool) ([]time.Time, error) {
- strs := k.Strings(delim)
- vals := make([]time.Time, 0, len(strs))
- for _, str := range strs {
- val, err := time.Parse(format, str)
- if err != nil && returnOnInvalid {
- return nil, err
- }
- if err == nil || addInvalid {
- vals = append(vals, val)
- }
- }
- return vals, nil
-}
-
-// SetValue changes key value.
-func (k *Key) SetValue(v string) {
- if k.s.f.BlockMode {
- k.s.f.lock.Lock()
- defer k.s.f.lock.Unlock()
- }
-
- k.value = v
- k.s.keysHash[k.name] = v
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key_test.go
deleted file mode 100644
index 1281d5bf..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/key_test.go
+++ /dev/null
@@ -1,573 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "bytes"
- "fmt"
- "strings"
- "testing"
- "time"
-
- . "github.com/smartystreets/goconvey/convey"
-)
-
-func Test_Key(t *testing.T) {
- Convey("Test getting and setting values", t, func() {
- cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- Convey("Get values in default section", func() {
- sec := cfg.Section("")
- So(sec, ShouldNotBeNil)
- So(sec.Key("NAME").Value(), ShouldEqual, "ini")
- So(sec.Key("NAME").String(), ShouldEqual, "ini")
- So(sec.Key("NAME").Validate(func(in string) string {
- return in
- }), ShouldEqual, "ini")
- So(sec.Key("NAME").Comment, ShouldEqual, "; Package name")
- So(sec.Key("IMPORT_PATH").String(), ShouldEqual, "gopkg.in/ini.v1")
- })
-
- Convey("Get values in non-default section", func() {
- sec := cfg.Section("author")
- So(sec, ShouldNotBeNil)
- So(sec.Key("NAME").String(), ShouldEqual, "Unknwon")
- So(sec.Key("GITHUB").String(), ShouldEqual, "https://github.com/Unknwon")
-
- sec = cfg.Section("package")
- So(sec, ShouldNotBeNil)
- So(sec.Key("CLONE_URL").String(), ShouldEqual, "https://gopkg.in/ini.v1")
- })
-
- Convey("Get auto-increment key names", func() {
- keys := cfg.Section("features").Keys()
- for i, k := range keys {
- So(k.Name(), ShouldEqual, fmt.Sprintf("#%d", i+1))
- }
- })
-
- Convey("Get parent-keys that are available to the child section", func() {
- parentKeys := cfg.Section("package.sub").ParentKeys()
- for _, k := range parentKeys {
- So(k.Name(), ShouldEqual, "CLONE_URL")
- }
- })
-
- Convey("Get overwrite value", func() {
- So(cfg.Section("author").Key("E-MAIL").String(), ShouldEqual, "u@gogs.io")
- })
-
- Convey("Get sections", func() {
- sections := cfg.Sections()
- for i, name := range []string{DEFAULT_SECTION, "author", "package", "package.sub", "features", "types", "array", "note", "comments", "advance"} {
- So(sections[i].Name(), ShouldEqual, name)
- }
- })
-
- Convey("Get parent section value", func() {
- So(cfg.Section("package.sub").Key("CLONE_URL").String(), ShouldEqual, "https://gopkg.in/ini.v1")
- So(cfg.Section("package.fake.sub").Key("CLONE_URL").String(), ShouldEqual, "https://gopkg.in/ini.v1")
- })
-
- Convey("Get multiple line value", func() {
- So(cfg.Section("author").Key("BIO").String(), ShouldEqual, "Gopher.\nCoding addict.\nGood man.\n")
- })
-
- Convey("Get values with type", func() {
- sec := cfg.Section("types")
- v1, err := sec.Key("BOOL").Bool()
- So(err, ShouldBeNil)
- So(v1, ShouldBeTrue)
-
- v1, err = sec.Key("BOOL_FALSE").Bool()
- So(err, ShouldBeNil)
- So(v1, ShouldBeFalse)
-
- v2, err := sec.Key("FLOAT64").Float64()
- So(err, ShouldBeNil)
- So(v2, ShouldEqual, 1.25)
-
- v3, err := sec.Key("INT").Int()
- So(err, ShouldBeNil)
- So(v3, ShouldEqual, 10)
-
- v4, err := sec.Key("INT").Int64()
- So(err, ShouldBeNil)
- So(v4, ShouldEqual, 10)
-
- v5, err := sec.Key("UINT").Uint()
- So(err, ShouldBeNil)
- So(v5, ShouldEqual, 3)
-
- v6, err := sec.Key("UINT").Uint64()
- So(err, ShouldBeNil)
- So(v6, ShouldEqual, 3)
-
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- v7, err := sec.Key("TIME").Time()
- So(err, ShouldBeNil)
- So(v7.String(), ShouldEqual, t.String())
-
- Convey("Must get values with type", func() {
- So(sec.Key("STRING").MustString("404"), ShouldEqual, "str")
- So(sec.Key("BOOL").MustBool(), ShouldBeTrue)
- So(sec.Key("FLOAT64").MustFloat64(), ShouldEqual, 1.25)
- So(sec.Key("INT").MustInt(), ShouldEqual, 10)
- So(sec.Key("INT").MustInt64(), ShouldEqual, 10)
- So(sec.Key("UINT").MustUint(), ShouldEqual, 3)
- So(sec.Key("UINT").MustUint64(), ShouldEqual, 3)
- So(sec.Key("TIME").MustTime().String(), ShouldEqual, t.String())
-
- dur, err := time.ParseDuration("2h45m")
- So(err, ShouldBeNil)
- So(sec.Key("DURATION").MustDuration().Seconds(), ShouldEqual, dur.Seconds())
-
- Convey("Must get values with default value", func() {
- So(sec.Key("STRING_404").MustString("404"), ShouldEqual, "404")
- So(sec.Key("BOOL_404").MustBool(true), ShouldBeTrue)
- So(sec.Key("FLOAT64_404").MustFloat64(2.5), ShouldEqual, 2.5)
- So(sec.Key("INT_404").MustInt(15), ShouldEqual, 15)
- So(sec.Key("INT64_404").MustInt64(15), ShouldEqual, 15)
- So(sec.Key("UINT_404").MustUint(6), ShouldEqual, 6)
- So(sec.Key("UINT64_404").MustUint64(6), ShouldEqual, 6)
-
- t, err := time.Parse(time.RFC3339, "2014-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- So(sec.Key("TIME_404").MustTime(t).String(), ShouldEqual, t.String())
-
- So(sec.Key("DURATION_404").MustDuration(dur).Seconds(), ShouldEqual, dur.Seconds())
-
- Convey("Must should set default as key value", func() {
- So(sec.Key("STRING_404").String(), ShouldEqual, "404")
- So(sec.Key("BOOL_404").String(), ShouldEqual, "true")
- So(sec.Key("FLOAT64_404").String(), ShouldEqual, "2.5")
- So(sec.Key("INT_404").String(), ShouldEqual, "15")
- So(sec.Key("INT64_404").String(), ShouldEqual, "15")
- So(sec.Key("UINT_404").String(), ShouldEqual, "6")
- So(sec.Key("UINT64_404").String(), ShouldEqual, "6")
- So(sec.Key("TIME_404").String(), ShouldEqual, "2014-01-01T20:17:05Z")
- So(sec.Key("DURATION_404").String(), ShouldEqual, "2h45m0s")
- })
- })
- })
- })
-
- Convey("Get value with candidates", func() {
- sec := cfg.Section("types")
- So(sec.Key("STRING").In("", []string{"str", "arr", "types"}), ShouldEqual, "str")
- So(sec.Key("FLOAT64").InFloat64(0, []float64{1.25, 2.5, 3.75}), ShouldEqual, 1.25)
- So(sec.Key("INT").InInt(0, []int{10, 20, 30}), ShouldEqual, 10)
- So(sec.Key("INT").InInt64(0, []int64{10, 20, 30}), ShouldEqual, 10)
- So(sec.Key("UINT").InUint(0, []uint{3, 6, 9}), ShouldEqual, 3)
- So(sec.Key("UINT").InUint64(0, []uint64{3, 6, 9}), ShouldEqual, 3)
-
- zt, err := time.Parse(time.RFC3339, "0001-01-01T01:00:00Z")
- So(err, ShouldBeNil)
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- So(sec.Key("TIME").InTime(zt, []time.Time{t, time.Now(), time.Now().Add(1 * time.Second)}).String(), ShouldEqual, t.String())
-
- Convey("Get value with candidates and default value", func() {
- So(sec.Key("STRING_404").In("str", []string{"str", "arr", "types"}), ShouldEqual, "str")
- So(sec.Key("FLOAT64_404").InFloat64(1.25, []float64{1.25, 2.5, 3.75}), ShouldEqual, 1.25)
- So(sec.Key("INT_404").InInt(10, []int{10, 20, 30}), ShouldEqual, 10)
- So(sec.Key("INT64_404").InInt64(10, []int64{10, 20, 30}), ShouldEqual, 10)
- So(sec.Key("UINT_404").InUint(3, []uint{3, 6, 9}), ShouldEqual, 3)
- So(sec.Key("UINT_404").InUint64(3, []uint64{3, 6, 9}), ShouldEqual, 3)
- So(sec.Key("TIME_404").InTime(t, []time.Time{time.Now(), time.Now(), time.Now().Add(1 * time.Second)}).String(), ShouldEqual, t.String())
- })
- })
-
- Convey("Get values in range", func() {
- sec := cfg.Section("types")
- So(sec.Key("FLOAT64").RangeFloat64(0, 1, 2), ShouldEqual, 1.25)
- So(sec.Key("INT").RangeInt(0, 10, 20), ShouldEqual, 10)
- So(sec.Key("INT").RangeInt64(0, 10, 20), ShouldEqual, 10)
-
- minT, err := time.Parse(time.RFC3339, "0001-01-01T01:00:00Z")
- So(err, ShouldBeNil)
- midT, err := time.Parse(time.RFC3339, "2013-01-01T01:00:00Z")
- So(err, ShouldBeNil)
- maxT, err := time.Parse(time.RFC3339, "9999-01-01T01:00:00Z")
- So(err, ShouldBeNil)
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- So(sec.Key("TIME").RangeTime(t, minT, maxT).String(), ShouldEqual, t.String())
-
- Convey("Get value in range with default value", func() {
- So(sec.Key("FLOAT64").RangeFloat64(5, 0, 1), ShouldEqual, 5)
- So(sec.Key("INT").RangeInt(7, 0, 5), ShouldEqual, 7)
- So(sec.Key("INT").RangeInt64(7, 0, 5), ShouldEqual, 7)
- So(sec.Key("TIME").RangeTime(t, minT, midT).String(), ShouldEqual, t.String())
- })
- })
-
- Convey("Get values into slice", func() {
- sec := cfg.Section("array")
- So(strings.Join(sec.Key("STRINGS").Strings(","), ","), ShouldEqual, "en,zh,de")
- So(len(sec.Key("STRINGS_404").Strings(",")), ShouldEqual, 0)
-
- vals1 := sec.Key("FLOAT64S").Float64s(",")
- float64sEqual(vals1, 1.1, 2.2, 3.3)
-
- vals2 := sec.Key("INTS").Ints(",")
- intsEqual(vals2, 1, 2, 3)
-
- vals3 := sec.Key("INTS").Int64s(",")
- int64sEqual(vals3, 1, 2, 3)
-
- vals4 := sec.Key("UINTS").Uints(",")
- uintsEqual(vals4, 1, 2, 3)
-
- vals5 := sec.Key("UINTS").Uint64s(",")
- uint64sEqual(vals5, 1, 2, 3)
-
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- vals6 := sec.Key("TIMES").Times(",")
- timesEqual(vals6, t, t, t)
- })
-
- Convey("Get valid values into slice", func() {
- sec := cfg.Section("array")
- vals1 := sec.Key("FLOAT64S").ValidFloat64s(",")
- float64sEqual(vals1, 1.1, 2.2, 3.3)
-
- vals2 := sec.Key("INTS").ValidInts(",")
- intsEqual(vals2, 1, 2, 3)
-
- vals3 := sec.Key("INTS").ValidInt64s(",")
- int64sEqual(vals3, 1, 2, 3)
-
- vals4 := sec.Key("UINTS").ValidUints(",")
- uintsEqual(vals4, 1, 2, 3)
-
- vals5 := sec.Key("UINTS").ValidUint64s(",")
- uint64sEqual(vals5, 1, 2, 3)
-
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- vals6 := sec.Key("TIMES").ValidTimes(",")
- timesEqual(vals6, t, t, t)
- })
-
- Convey("Get values one type into slice of another type", func() {
- sec := cfg.Section("array")
- vals1 := sec.Key("STRINGS").ValidFloat64s(",")
- So(vals1, ShouldBeEmpty)
-
- vals2 := sec.Key("STRINGS").ValidInts(",")
- So(vals2, ShouldBeEmpty)
-
- vals3 := sec.Key("STRINGS").ValidInt64s(",")
- So(vals3, ShouldBeEmpty)
-
- vals4 := sec.Key("STRINGS").ValidUints(",")
- So(vals4, ShouldBeEmpty)
-
- vals5 := sec.Key("STRINGS").ValidUint64s(",")
- So(vals5, ShouldBeEmpty)
-
- vals6 := sec.Key("STRINGS").ValidTimes(",")
- So(vals6, ShouldBeEmpty)
- })
-
- Convey("Get valid values into slice without errors", func() {
- sec := cfg.Section("array")
- vals1, err := sec.Key("FLOAT64S").StrictFloat64s(",")
- So(err, ShouldBeNil)
- float64sEqual(vals1, 1.1, 2.2, 3.3)
-
- vals2, err := sec.Key("INTS").StrictInts(",")
- So(err, ShouldBeNil)
- intsEqual(vals2, 1, 2, 3)
-
- vals3, err := sec.Key("INTS").StrictInt64s(",")
- So(err, ShouldBeNil)
- int64sEqual(vals3, 1, 2, 3)
-
- vals4, err := sec.Key("UINTS").StrictUints(",")
- So(err, ShouldBeNil)
- uintsEqual(vals4, 1, 2, 3)
-
- vals5, err := sec.Key("UINTS").StrictUint64s(",")
- So(err, ShouldBeNil)
- uint64sEqual(vals5, 1, 2, 3)
-
- t, err := time.Parse(time.RFC3339, "2015-01-01T20:17:05Z")
- So(err, ShouldBeNil)
- vals6, err := sec.Key("TIMES").StrictTimes(",")
- So(err, ShouldBeNil)
- timesEqual(vals6, t, t, t)
- })
-
- Convey("Get invalid values into slice", func() {
- sec := cfg.Section("array")
- vals1, err := sec.Key("STRINGS").StrictFloat64s(",")
- So(vals1, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
-
- vals2, err := sec.Key("STRINGS").StrictInts(",")
- So(vals2, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
-
- vals3, err := sec.Key("STRINGS").StrictInt64s(",")
- So(vals3, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
-
- vals4, err := sec.Key("STRINGS").StrictUints(",")
- So(vals4, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
-
- vals5, err := sec.Key("STRINGS").StrictUint64s(",")
- So(vals5, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
-
- vals6, err := sec.Key("STRINGS").StrictTimes(",")
- So(vals6, ShouldBeEmpty)
- So(err, ShouldNotBeNil)
- })
-
- Convey("Get key hash", func() {
- cfg.Section("").KeysHash()
- })
-
- Convey("Set key value", func() {
- k := cfg.Section("author").Key("NAME")
- k.SetValue("无闻")
- So(k.String(), ShouldEqual, "无闻")
- })
-
- Convey("Get key strings", func() {
- So(strings.Join(cfg.Section("types").KeyStrings(), ","), ShouldEqual, "STRING,BOOL,BOOL_FALSE,FLOAT64,INT,TIME,DURATION,UINT")
- })
-
- Convey("Delete a key", func() {
- cfg.Section("package.sub").DeleteKey("UNUSED_KEY")
- _, err := cfg.Section("package.sub").GetKey("UNUSED_KEY")
- So(err, ShouldNotBeNil)
- })
-
- Convey("Has Key (backwards compatible)", func() {
- sec := cfg.Section("package.sub")
- haskey1 := sec.Haskey("UNUSED_KEY")
- haskey2 := sec.Haskey("CLONE_URL")
- haskey3 := sec.Haskey("CLONE_URL_NO")
- So(haskey1, ShouldBeTrue)
- So(haskey2, ShouldBeTrue)
- So(haskey3, ShouldBeFalse)
- })
-
- Convey("Has Key", func() {
- sec := cfg.Section("package.sub")
- haskey1 := sec.HasKey("UNUSED_KEY")
- haskey2 := sec.HasKey("CLONE_URL")
- haskey3 := sec.HasKey("CLONE_URL_NO")
- So(haskey1, ShouldBeTrue)
- So(haskey2, ShouldBeTrue)
- So(haskey3, ShouldBeFalse)
- })
-
- Convey("Has Value", func() {
- sec := cfg.Section("author")
- hasvalue1 := sec.HasValue("Unknwon")
- hasvalue2 := sec.HasValue("doc")
- So(hasvalue1, ShouldBeTrue)
- So(hasvalue2, ShouldBeFalse)
- })
- })
-
- Convey("Test getting and setting bad values", t, func() {
- cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- Convey("Create new key with empty name", func() {
- k, err := cfg.Section("").NewKey("", "")
- So(err, ShouldNotBeNil)
- So(k, ShouldBeNil)
- })
-
- Convey("Create new section with empty name", func() {
- s, err := cfg.NewSection("")
- So(err, ShouldNotBeNil)
- So(s, ShouldBeNil)
- })
-
- Convey("Create new sections with empty name", func() {
- So(cfg.NewSections(""), ShouldNotBeNil)
- })
-
- Convey("Get section that not exists", func() {
- s, err := cfg.GetSection("404")
- So(err, ShouldNotBeNil)
- So(s, ShouldBeNil)
-
- s = cfg.Section("404")
- So(s, ShouldNotBeNil)
- })
- })
-
- Convey("Test key hash clone", t, func() {
- cfg, err := Load([]byte(strings.Replace("network=tcp,addr=127.0.0.1:6379,db=4,pool_size=100,idle_timeout=180", ",", "\n", -1)))
- So(err, ShouldBeNil)
- for _, v := range cfg.Section("").KeysHash() {
- So(len(v), ShouldBeGreaterThan, 0)
- }
- })
-
- Convey("Key has empty value", t, func() {
- _conf := `key1=
-key2= ; comment`
- cfg, err := Load([]byte(_conf))
- So(err, ShouldBeNil)
- So(cfg.Section("").Key("key1").Value(), ShouldBeEmpty)
- })
-}
-
-const _CONF_GIT_CONFIG = `
-[remote "origin"]
- url = https://github.com/Antergone/test1.git
- url = https://github.com/Antergone/test2.git
-`
-
-func Test_Key_Shadows(t *testing.T) {
- Convey("Shadows keys", t, func() {
- Convey("Disable shadows", func() {
- cfg, err := Load([]byte(_CONF_GIT_CONFIG))
- So(err, ShouldBeNil)
- So(cfg.Section(`remote "origin"`).Key("url").String(), ShouldEqual, "https://github.com/Antergone/test2.git")
- })
-
- Convey("Enable shadows", func() {
- cfg, err := ShadowLoad([]byte(_CONF_GIT_CONFIG))
- So(err, ShouldBeNil)
- So(cfg.Section(`remote "origin"`).Key("url").String(), ShouldEqual, "https://github.com/Antergone/test1.git")
- So(strings.Join(cfg.Section(`remote "origin"`).Key("url").ValueWithShadows(), " "), ShouldEqual,
- "https://github.com/Antergone/test1.git https://github.com/Antergone/test2.git")
-
- Convey("Save with shadows", func() {
- var buf bytes.Buffer
- _, err := cfg.WriteTo(&buf)
- So(err, ShouldBeNil)
- So(buf.String(), ShouldEqual, `[remote "origin"]
-url = https://github.com/Antergone/test1.git
-url = https://github.com/Antergone/test2.git
-
-`)
- })
- })
- })
-}
-
-func newTestFile(block bool) *File {
- c, _ := Load([]byte(_CONF_DATA))
- c.BlockMode = block
- return c
-}
-
-func Benchmark_Key_Value(b *testing.B) {
- c := newTestFile(true)
- for i := 0; i < b.N; i++ {
- c.Section("").Key("NAME").Value()
- }
-}
-
-func Benchmark_Key_Value_NonBlock(b *testing.B) {
- c := newTestFile(false)
- for i := 0; i < b.N; i++ {
- c.Section("").Key("NAME").Value()
- }
-}
-
-func Benchmark_Key_Value_ViaSection(b *testing.B) {
- c := newTestFile(true)
- sec := c.Section("")
- for i := 0; i < b.N; i++ {
- sec.Key("NAME").Value()
- }
-}
-
-func Benchmark_Key_Value_ViaSection_NonBlock(b *testing.B) {
- c := newTestFile(false)
- sec := c.Section("")
- for i := 0; i < b.N; i++ {
- sec.Key("NAME").Value()
- }
-}
-
-func Benchmark_Key_Value_Direct(b *testing.B) {
- c := newTestFile(true)
- key := c.Section("").Key("NAME")
- for i := 0; i < b.N; i++ {
- key.Value()
- }
-}
-
-func Benchmark_Key_Value_Direct_NonBlock(b *testing.B) {
- c := newTestFile(false)
- key := c.Section("").Key("NAME")
- for i := 0; i < b.N; i++ {
- key.Value()
- }
-}
-
-func Benchmark_Key_String(b *testing.B) {
- c := newTestFile(true)
- for i := 0; i < b.N; i++ {
- _ = c.Section("").Key("NAME").String()
- }
-}
-
-func Benchmark_Key_String_NonBlock(b *testing.B) {
- c := newTestFile(false)
- for i := 0; i < b.N; i++ {
- _ = c.Section("").Key("NAME").String()
- }
-}
-
-func Benchmark_Key_String_ViaSection(b *testing.B) {
- c := newTestFile(true)
- sec := c.Section("")
- for i := 0; i < b.N; i++ {
- _ = sec.Key("NAME").String()
- }
-}
-
-func Benchmark_Key_String_ViaSection_NonBlock(b *testing.B) {
- c := newTestFile(false)
- sec := c.Section("")
- for i := 0; i < b.N; i++ {
- _ = sec.Key("NAME").String()
- }
-}
-
-func Benchmark_Key_SetValue(b *testing.B) {
- c := newTestFile(true)
- for i := 0; i < b.N; i++ {
- c.Section("").Key("NAME").SetValue("10")
- }
-}
-
-func Benchmark_Key_SetValue_VisSection(b *testing.B) {
- c := newTestFile(true)
- sec := c.Section("")
- for i := 0; i < b.N; i++ {
- sec.Key("NAME").SetValue("10")
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser.go
deleted file mode 100644
index 673ef80c..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser.go
+++ /dev/null
@@ -1,358 +0,0 @@
-// Copyright 2015 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "bufio"
- "bytes"
- "fmt"
- "io"
- "strconv"
- "strings"
- "unicode"
-)
-
-type tokenType int
-
-const (
- _TOKEN_INVALID tokenType = iota
- _TOKEN_COMMENT
- _TOKEN_SECTION
- _TOKEN_KEY
-)
-
-type parser struct {
- buf *bufio.Reader
- isEOF bool
- count int
- comment *bytes.Buffer
-}
-
-func newParser(r io.Reader) *parser {
- return &parser{
- buf: bufio.NewReader(r),
- count: 1,
- comment: &bytes.Buffer{},
- }
-}
-
-// BOM handles header of UTF-8, UTF-16 LE and UTF-16 BE's BOM format.
-// http://en.wikipedia.org/wiki/Byte_order_mark#Representations_of_byte_order_marks_by_encoding
-func (p *parser) BOM() error {
- mask, err := p.buf.Peek(2)
- if err != nil && err != io.EOF {
- return err
- } else if len(mask) < 2 {
- return nil
- }
-
- switch {
- case mask[0] == 254 && mask[1] == 255:
- fallthrough
- case mask[0] == 255 && mask[1] == 254:
- p.buf.Read(mask)
- case mask[0] == 239 && mask[1] == 187:
- mask, err := p.buf.Peek(3)
- if err != nil && err != io.EOF {
- return err
- } else if len(mask) < 3 {
- return nil
- }
- if mask[2] == 191 {
- p.buf.Read(mask)
- }
- }
- return nil
-}
-
-func (p *parser) readUntil(delim byte) ([]byte, error) {
- data, err := p.buf.ReadBytes(delim)
- if err != nil {
- if err == io.EOF {
- p.isEOF = true
- } else {
- return nil, err
- }
- }
- return data, nil
-}
-
-func cleanComment(in []byte) ([]byte, bool) {
- i := bytes.IndexAny(in, "#;")
- if i == -1 {
- return nil, false
- }
- return in[i:], true
-}
-
-func readKeyName(in []byte) (string, int, error) {
- line := string(in)
-
- // Check if key name surrounded by quotes.
- var keyQuote string
- if line[0] == '"' {
- if len(line) > 6 && string(line[0:3]) == `"""` {
- keyQuote = `"""`
- } else {
- keyQuote = `"`
- }
- } else if line[0] == '`' {
- keyQuote = "`"
- }
-
- // Get out key name
- endIdx := -1
- if len(keyQuote) > 0 {
- startIdx := len(keyQuote)
- // FIXME: fail case -> """"""name"""=value
- pos := strings.Index(line[startIdx:], keyQuote)
- if pos == -1 {
- return "", -1, fmt.Errorf("missing closing key quote: %s", line)
- }
- pos += startIdx
-
- // Find key-value delimiter
- i := strings.IndexAny(line[pos+startIdx:], "=:")
- if i < 0 {
- return "", -1, ErrDelimiterNotFound{line}
- }
- endIdx = pos + i
- return strings.TrimSpace(line[startIdx:pos]), endIdx + startIdx + 1, nil
- }
-
- endIdx = strings.IndexAny(line, "=:")
- if endIdx < 0 {
- return "", -1, ErrDelimiterNotFound{line}
- }
- return strings.TrimSpace(line[0:endIdx]), endIdx + 1, nil
-}
-
-func (p *parser) readMultilines(line, val, valQuote string) (string, error) {
- for {
- data, err := p.readUntil('\n')
- if err != nil {
- return "", err
- }
- next := string(data)
-
- pos := strings.LastIndex(next, valQuote)
- if pos > -1 {
- val += next[:pos]
-
- comment, has := cleanComment([]byte(next[pos:]))
- if has {
- p.comment.Write(bytes.TrimSpace(comment))
- }
- break
- }
- val += next
- if p.isEOF {
- return "", fmt.Errorf("missing closing key quote from '%s' to '%s'", line, next)
- }
- }
- return val, nil
-}
-
-func (p *parser) readContinuationLines(val string) (string, error) {
- for {
- data, err := p.readUntil('\n')
- if err != nil {
- return "", err
- }
- next := strings.TrimSpace(string(data))
-
- if len(next) == 0 {
- break
- }
- val += next
- if val[len(val)-1] != '\\' {
- break
- }
- val = val[:len(val)-1]
- }
- return val, nil
-}
-
-// hasSurroundedQuote check if and only if the first and last characters
-// are quotes \" or \'.
-// It returns false if any other parts also contain same kind of quotes.
-func hasSurroundedQuote(in string, quote byte) bool {
- return len(in) > 2 && in[0] == quote && in[len(in)-1] == quote &&
- strings.IndexByte(in[1:], quote) == len(in)-2
-}
-
-func (p *parser) readValue(in []byte, ignoreContinuation bool) (string, error) {
- line := strings.TrimLeftFunc(string(in), unicode.IsSpace)
- if len(line) == 0 {
- return "", nil
- }
-
- var valQuote string
- if len(line) > 3 && string(line[0:3]) == `"""` {
- valQuote = `"""`
- } else if line[0] == '`' {
- valQuote = "`"
- }
-
- if len(valQuote) > 0 {
- startIdx := len(valQuote)
- pos := strings.LastIndex(line[startIdx:], valQuote)
- // Check for multi-line value
- if pos == -1 {
- return p.readMultilines(line, line[startIdx:], valQuote)
- }
-
- return line[startIdx : pos+startIdx], nil
- }
-
- // Won't be able to reach here if value only contains whitespace.
- line = strings.TrimSpace(line)
-
- // Check continuation lines when desired.
- if !ignoreContinuation && line[len(line)-1] == '\\' {
- return p.readContinuationLines(line[:len(line)-1])
- }
-
- i := strings.IndexAny(line, "#;")
- if i > -1 {
- p.comment.WriteString(line[i:])
- line = strings.TrimSpace(line[:i])
- }
-
- // Trim single quotes
- if hasSurroundedQuote(line, '\'') ||
- hasSurroundedQuote(line, '"') {
- line = line[1 : len(line)-1]
- }
- return line, nil
-}
-
-// parse parses data through an io.Reader.
-func (f *File) parse(reader io.Reader) (err error) {
- p := newParser(reader)
- if err = p.BOM(); err != nil {
- return fmt.Errorf("BOM: %v", err)
- }
-
- // Ignore error because default section name is never empty string.
- section, _ := f.NewSection(DEFAULT_SECTION)
-
- var line []byte
- var inUnparseableSection bool
- for !p.isEOF {
- line, err = p.readUntil('\n')
- if err != nil {
- return err
- }
-
- line = bytes.TrimLeftFunc(line, unicode.IsSpace)
- if len(line) == 0 {
- continue
- }
-
- // Comments
- if line[0] == '#' || line[0] == ';' {
- // Note: we do not care ending line break,
- // it is needed for adding second line,
- // so just clean it once at the end when set to value.
- p.comment.Write(line)
- continue
- }
-
- // Section
- if line[0] == '[' {
- // Read to the next ']' (TODO: support quoted strings)
- // TODO(unknwon): use LastIndexByte when stop supporting Go1.4
- closeIdx := bytes.LastIndex(line, []byte("]"))
- if closeIdx == -1 {
- return fmt.Errorf("unclosed section: %s", line)
- }
-
- name := string(line[1:closeIdx])
- section, err = f.NewSection(name)
- if err != nil {
- return err
- }
-
- comment, has := cleanComment(line[closeIdx+1:])
- if has {
- p.comment.Write(comment)
- }
-
- section.Comment = strings.TrimSpace(p.comment.String())
-
- // Reset aotu-counter and comments
- p.comment.Reset()
- p.count = 1
-
- inUnparseableSection = false
- for i := range f.options.UnparseableSections {
- if f.options.UnparseableSections[i] == name ||
- (f.options.Insensitive && strings.ToLower(f.options.UnparseableSections[i]) == strings.ToLower(name)) {
- inUnparseableSection = true
- continue
- }
- }
- continue
- }
-
- if inUnparseableSection {
- section.isRawSection = true
- section.rawBody += string(line)
- continue
- }
-
- kname, offset, err := readKeyName(line)
- if err != nil {
- // Treat as boolean key when desired, and whole line is key name.
- if IsErrDelimiterNotFound(err) && f.options.AllowBooleanKeys {
- kname, err := p.readValue(line, f.options.IgnoreContinuation)
- if err != nil {
- return err
- }
- key, err := section.NewBooleanKey(kname)
- if err != nil {
- return err
- }
- key.Comment = strings.TrimSpace(p.comment.String())
- p.comment.Reset()
- continue
- }
- return err
- }
-
- // Auto increment.
- isAutoIncr := false
- if kname == "-" {
- isAutoIncr = true
- kname = "#" + strconv.Itoa(p.count)
- p.count++
- }
-
- value, err := p.readValue(line[offset:], f.options.IgnoreContinuation)
- if err != nil {
- return err
- }
-
- key, err := section.NewKey(kname, value)
- if err != nil {
- return err
- }
- key.isAutoIncrement = isAutoIncr
- key.Comment = strings.TrimSpace(p.comment.String())
- p.comment.Reset()
- }
- return nil
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser_test.go
deleted file mode 100644
index 05258195..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/parser_test.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2016 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "testing"
-
- . "github.com/smartystreets/goconvey/convey"
-)
-
-func Test_BOM(t *testing.T) {
- Convey("Test handling BOM", t, func() {
- Convey("UTF-8-BOM", func() {
- cfg, err := Load("testdata/UTF-8-BOM.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(cfg.Section("author").Key("E-MAIL").String(), ShouldEqual, "u@gogs.io")
- })
-
- Convey("UTF-16-LE-BOM", func() {
- cfg, err := Load("testdata/UTF-16-LE-BOM.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
- })
-
- Convey("UTF-16-BE-BOM", func() {
- })
- })
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section.go
deleted file mode 100644
index c9fa27e9..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section.go
+++ /dev/null
@@ -1,234 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "errors"
- "fmt"
- "strings"
-)
-
-// Section represents a config section.
-type Section struct {
- f *File
- Comment string
- name string
- keys map[string]*Key
- keyList []string
- keysHash map[string]string
-
- isRawSection bool
- rawBody string
-}
-
-func newSection(f *File, name string) *Section {
- return &Section{
- f: f,
- name: name,
- keys: make(map[string]*Key),
- keyList: make([]string, 0, 10),
- keysHash: make(map[string]string),
- }
-}
-
-// Name returns name of Section.
-func (s *Section) Name() string {
- return s.name
-}
-
-// Body returns rawBody of Section if the section was marked as unparseable.
-// It still follows the other rules of the INI format surrounding leading/trailing whitespace.
-func (s *Section) Body() string {
- return strings.TrimSpace(s.rawBody)
-}
-
-// NewKey creates a new key to given section.
-func (s *Section) NewKey(name, val string) (*Key, error) {
- if len(name) == 0 {
- return nil, errors.New("error creating new key: empty key name")
- } else if s.f.options.Insensitive {
- name = strings.ToLower(name)
- }
-
- if s.f.BlockMode {
- s.f.lock.Lock()
- defer s.f.lock.Unlock()
- }
-
- if inSlice(name, s.keyList) {
- if s.f.options.AllowShadows {
- if err := s.keys[name].addShadow(val); err != nil {
- return nil, err
- }
- } else {
- s.keys[name].value = val
- }
- return s.keys[name], nil
- }
-
- s.keyList = append(s.keyList, name)
- s.keys[name] = newKey(s, name, val)
- s.keysHash[name] = val
- return s.keys[name], nil
-}
-
-// NewBooleanKey creates a new boolean type key to given section.
-func (s *Section) NewBooleanKey(name string) (*Key, error) {
- key, err := s.NewKey(name, "true")
- if err != nil {
- return nil, err
- }
-
- key.isBooleanType = true
- return key, nil
-}
-
-// GetKey returns key in section by given name.
-func (s *Section) GetKey(name string) (*Key, error) {
- // FIXME: change to section level lock?
- if s.f.BlockMode {
- s.f.lock.RLock()
- }
- if s.f.options.Insensitive {
- name = strings.ToLower(name)
- }
- key := s.keys[name]
- if s.f.BlockMode {
- s.f.lock.RUnlock()
- }
-
- if key == nil {
- // Check if it is a child-section.
- sname := s.name
- for {
- if i := strings.LastIndex(sname, "."); i > -1 {
- sname = sname[:i]
- sec, err := s.f.GetSection(sname)
- if err != nil {
- continue
- }
- return sec.GetKey(name)
- } else {
- break
- }
- }
- return nil, fmt.Errorf("error when getting key of section '%s': key '%s' not exists", s.name, name)
- }
- return key, nil
-}
-
-// HasKey returns true if section contains a key with given name.
-func (s *Section) HasKey(name string) bool {
- key, _ := s.GetKey(name)
- return key != nil
-}
-
-// Haskey is a backwards-compatible name for HasKey.
-func (s *Section) Haskey(name string) bool {
- return s.HasKey(name)
-}
-
-// HasValue returns true if section contains given raw value.
-func (s *Section) HasValue(value string) bool {
- if s.f.BlockMode {
- s.f.lock.RLock()
- defer s.f.lock.RUnlock()
- }
-
- for _, k := range s.keys {
- if value == k.value {
- return true
- }
- }
- return false
-}
-
-// Key assumes named Key exists in section and returns a zero-value when not.
-func (s *Section) Key(name string) *Key {
- key, err := s.GetKey(name)
- if err != nil {
- // It's OK here because the only possible error is empty key name,
- // but if it's empty, this piece of code won't be executed.
- key, _ = s.NewKey(name, "")
- return key
- }
- return key
-}
-
-// Keys returns list of keys of section.
-func (s *Section) Keys() []*Key {
- keys := make([]*Key, len(s.keyList))
- for i := range s.keyList {
- keys[i] = s.Key(s.keyList[i])
- }
- return keys
-}
-
-// ParentKeys returns list of keys of parent section.
-func (s *Section) ParentKeys() []*Key {
- var parentKeys []*Key
- sname := s.name
- for {
- if i := strings.LastIndex(sname, "."); i > -1 {
- sname = sname[:i]
- sec, err := s.f.GetSection(sname)
- if err != nil {
- continue
- }
- parentKeys = append(parentKeys, sec.Keys()...)
- } else {
- break
- }
-
- }
- return parentKeys
-}
-
-// KeyStrings returns list of key names of section.
-func (s *Section) KeyStrings() []string {
- list := make([]string, len(s.keyList))
- copy(list, s.keyList)
- return list
-}
-
-// KeysHash returns keys hash consisting of names and values.
-func (s *Section) KeysHash() map[string]string {
- if s.f.BlockMode {
- s.f.lock.RLock()
- defer s.f.lock.RUnlock()
- }
-
- hash := map[string]string{}
- for key, value := range s.keysHash {
- hash[key] = value
- }
- return hash
-}
-
-// DeleteKey deletes a key from section.
-func (s *Section) DeleteKey(name string) {
- if s.f.BlockMode {
- s.f.lock.Lock()
- defer s.f.lock.Unlock()
- }
-
- for i, k := range s.keyList {
- if k == name {
- s.keyList = append(s.keyList[:i], s.keyList[i+1:]...)
- delete(s.keys, name)
- return
- }
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section_test.go
deleted file mode 100644
index 80282c19..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/section_test.go
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "strings"
- "testing"
-
- . "github.com/smartystreets/goconvey/convey"
-)
-
-func Test_Section(t *testing.T) {
- Convey("Test CRD sections", t, func() {
- cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- Convey("Get section strings", func() {
- So(strings.Join(cfg.SectionStrings(), ","), ShouldEqual, "DEFAULT,author,package,package.sub,features,types,array,note,comments,advance")
- })
-
- Convey("Delete a section", func() {
- cfg.DeleteSection("")
- So(cfg.SectionStrings()[0], ShouldNotEqual, DEFAULT_SECTION)
- })
-
- Convey("Create new sections", func() {
- cfg.NewSections("test", "test2")
- _, err := cfg.GetSection("test")
- So(err, ShouldBeNil)
- _, err = cfg.GetSection("test2")
- So(err, ShouldBeNil)
- })
- })
-}
-
-func Test_SectionRaw(t *testing.T) {
- Convey("Test section raw string", t, func() {
- cfg, err := LoadSources(
- LoadOptions{
- Insensitive: true,
- UnparseableSections: []string{"core_lesson", "comments"},
- },
- "testdata/aicc.ini")
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- Convey("Get section strings", func() {
- So(strings.Join(cfg.SectionStrings(), ","), ShouldEqual, "DEFAULT,core,core_lesson,comments")
- })
-
- Convey("Validate non-raw section", func() {
- val, err := cfg.Section("core").GetKey("lesson_status")
- So(err, ShouldBeNil)
- So(val.String(), ShouldEqual, "C")
- })
-
- Convey("Validate raw section", func() {
- So(cfg.Section("core_lesson").Body(), ShouldEqual, `my lesson state data – 1111111111111111111000000000000000001110000
-111111111111111111100000000000111000000000 – end my lesson state data`)
- })
- })
-} \ No newline at end of file
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct.go
deleted file mode 100644
index 509c682f..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct.go
+++ /dev/null
@@ -1,450 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "bytes"
- "errors"
- "fmt"
- "reflect"
- "strings"
- "time"
- "unicode"
-)
-
-// NameMapper represents a ini tag name mapper.
-type NameMapper func(string) string
-
-// Built-in name getters.
-var (
- // AllCapsUnderscore converts to format ALL_CAPS_UNDERSCORE.
- AllCapsUnderscore NameMapper = func(raw string) string {
- newstr := make([]rune, 0, len(raw))
- for i, chr := range raw {
- if isUpper := 'A' <= chr && chr <= 'Z'; isUpper {
- if i > 0 {
- newstr = append(newstr, '_')
- }
- }
- newstr = append(newstr, unicode.ToUpper(chr))
- }
- return string(newstr)
- }
- // TitleUnderscore converts to format title_underscore.
- TitleUnderscore NameMapper = func(raw string) string {
- newstr := make([]rune, 0, len(raw))
- for i, chr := range raw {
- if isUpper := 'A' <= chr && chr <= 'Z'; isUpper {
- if i > 0 {
- newstr = append(newstr, '_')
- }
- chr -= ('A' - 'a')
- }
- newstr = append(newstr, chr)
- }
- return string(newstr)
- }
-)
-
-func (s *Section) parseFieldName(raw, actual string) string {
- if len(actual) > 0 {
- return actual
- }
- if s.f.NameMapper != nil {
- return s.f.NameMapper(raw)
- }
- return raw
-}
-
-func parseDelim(actual string) string {
- if len(actual) > 0 {
- return actual
- }
- return ","
-}
-
-var reflectTime = reflect.TypeOf(time.Now()).Kind()
-
-// setSliceWithProperType sets proper values to slice based on its type.
-func setSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow bool) error {
- var strs []string
- if allowShadow {
- strs = key.StringsWithShadows(delim)
- } else {
- strs = key.Strings(delim)
- }
-
- numVals := len(strs)
- if numVals == 0 {
- return nil
- }
-
- var vals interface{}
-
- sliceOf := field.Type().Elem().Kind()
- switch sliceOf {
- case reflect.String:
- vals = strs
- case reflect.Int:
- vals, _ = key.parseInts(strs, true, false)
- case reflect.Int64:
- vals, _ = key.parseInt64s(strs, true, false)
- case reflect.Uint:
- vals = key.Uints(delim)
- case reflect.Uint64:
- vals = key.Uint64s(delim)
- case reflect.Float64:
- vals = key.Float64s(delim)
- case reflectTime:
- vals = key.Times(delim)
- default:
- return fmt.Errorf("unsupported type '[]%s'", sliceOf)
- }
-
- slice := reflect.MakeSlice(field.Type(), numVals, numVals)
- for i := 0; i < numVals; i++ {
- switch sliceOf {
- case reflect.String:
- slice.Index(i).Set(reflect.ValueOf(vals.([]string)[i]))
- case reflect.Int:
- slice.Index(i).Set(reflect.ValueOf(vals.([]int)[i]))
- case reflect.Int64:
- slice.Index(i).Set(reflect.ValueOf(vals.([]int64)[i]))
- case reflect.Uint:
- slice.Index(i).Set(reflect.ValueOf(vals.([]uint)[i]))
- case reflect.Uint64:
- slice.Index(i).Set(reflect.ValueOf(vals.([]uint64)[i]))
- case reflect.Float64:
- slice.Index(i).Set(reflect.ValueOf(vals.([]float64)[i]))
- case reflectTime:
- slice.Index(i).Set(reflect.ValueOf(vals.([]time.Time)[i]))
- }
- }
- field.Set(slice)
- return nil
-}
-
-// setWithProperType sets proper value to field based on its type,
-// but it does not return error for failing parsing,
-// because we want to use default value that is already assigned to strcut.
-func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow bool) error {
- switch t.Kind() {
- case reflect.String:
- if len(key.String()) == 0 {
- return nil
- }
- field.SetString(key.String())
- case reflect.Bool:
- boolVal, err := key.Bool()
- if err != nil {
- return nil
- }
- field.SetBool(boolVal)
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- durationVal, err := key.Duration()
- // Skip zero value
- if err == nil && int(durationVal) > 0 {
- field.Set(reflect.ValueOf(durationVal))
- return nil
- }
-
- intVal, err := key.Int64()
- if err != nil || intVal == 0 {
- return nil
- }
- field.SetInt(intVal)
- // byte is an alias for uint8, so supporting uint8 breaks support for byte
- case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- durationVal, err := key.Duration()
- // Skip zero value
- if err == nil && int(durationVal) > 0 {
- field.Set(reflect.ValueOf(durationVal))
- return nil
- }
-
- uintVal, err := key.Uint64()
- if err != nil {
- return nil
- }
- field.SetUint(uintVal)
-
- case reflect.Float32, reflect.Float64:
- floatVal, err := key.Float64()
- if err != nil {
- return nil
- }
- field.SetFloat(floatVal)
- case reflectTime:
- timeVal, err := key.Time()
- if err != nil {
- return nil
- }
- field.Set(reflect.ValueOf(timeVal))
- case reflect.Slice:
- return setSliceWithProperType(key, field, delim, allowShadow)
- default:
- return fmt.Errorf("unsupported type '%s'", t)
- }
- return nil
-}
-
-func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool) {
- opts := strings.SplitN(tag, ",", 3)
- rawName = opts[0]
- if len(opts) > 1 {
- omitEmpty = opts[1] == "omitempty"
- }
- if len(opts) > 2 {
- allowShadow = opts[2] == "allowshadow"
- }
- return rawName, omitEmpty, allowShadow
-}
-
-func (s *Section) mapTo(val reflect.Value) error {
- if val.Kind() == reflect.Ptr {
- val = val.Elem()
- }
- typ := val.Type()
-
- for i := 0; i < typ.NumField(); i++ {
- field := val.Field(i)
- tpField := typ.Field(i)
-
- tag := tpField.Tag.Get("ini")
- if tag == "-" {
- continue
- }
-
- rawName, _, allowShadow := parseTagOptions(tag)
- fieldName := s.parseFieldName(tpField.Name, rawName)
- if len(fieldName) == 0 || !field.CanSet() {
- continue
- }
-
- isAnonymous := tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous
- isStruct := tpField.Type.Kind() == reflect.Struct
- if isAnonymous {
- field.Set(reflect.New(tpField.Type.Elem()))
- }
-
- if isAnonymous || isStruct {
- if sec, err := s.f.GetSection(fieldName); err == nil {
- if err = sec.mapTo(field); err != nil {
- return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
- }
- continue
- }
- }
-
- if key, err := s.GetKey(fieldName); err == nil {
- delim := parseDelim(tpField.Tag.Get("delim"))
- if err = setWithProperType(tpField.Type, key, field, delim, allowShadow); err != nil {
- return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
- }
- }
- }
- return nil
-}
-
-// MapTo maps section to given struct.
-func (s *Section) MapTo(v interface{}) error {
- typ := reflect.TypeOf(v)
- val := reflect.ValueOf(v)
- if typ.Kind() == reflect.Ptr {
- typ = typ.Elem()
- val = val.Elem()
- } else {
- return errors.New("cannot map to non-pointer struct")
- }
-
- return s.mapTo(val)
-}
-
-// MapTo maps file to given struct.
-func (f *File) MapTo(v interface{}) error {
- return f.Section("").MapTo(v)
-}
-
-// MapTo maps data sources to given struct with name mapper.
-func MapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error {
- cfg, err := Load(source, others...)
- if err != nil {
- return err
- }
- cfg.NameMapper = mapper
- return cfg.MapTo(v)
-}
-
-// MapTo maps data sources to given struct.
-func MapTo(v, source interface{}, others ...interface{}) error {
- return MapToWithMapper(v, nil, source, others...)
-}
-
-// reflectSliceWithProperType does the opposite thing as setSliceWithProperType.
-func reflectSliceWithProperType(key *Key, field reflect.Value, delim string) error {
- slice := field.Slice(0, field.Len())
- if field.Len() == 0 {
- return nil
- }
-
- var buf bytes.Buffer
- sliceOf := field.Type().Elem().Kind()
- for i := 0; i < field.Len(); i++ {
- switch sliceOf {
- case reflect.String:
- buf.WriteString(slice.Index(i).String())
- case reflect.Int, reflect.Int64:
- buf.WriteString(fmt.Sprint(slice.Index(i).Int()))
- case reflect.Uint, reflect.Uint64:
- buf.WriteString(fmt.Sprint(slice.Index(i).Uint()))
- case reflect.Float64:
- buf.WriteString(fmt.Sprint(slice.Index(i).Float()))
- case reflectTime:
- buf.WriteString(slice.Index(i).Interface().(time.Time).Format(time.RFC3339))
- default:
- return fmt.Errorf("unsupported type '[]%s'", sliceOf)
- }
- buf.WriteString(delim)
- }
- key.SetValue(buf.String()[:buf.Len()-1])
- return nil
-}
-
-// reflectWithProperType does the opposite thing as setWithProperType.
-func reflectWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string) error {
- switch t.Kind() {
- case reflect.String:
- key.SetValue(field.String())
- case reflect.Bool:
- key.SetValue(fmt.Sprint(field.Bool()))
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- key.SetValue(fmt.Sprint(field.Int()))
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- key.SetValue(fmt.Sprint(field.Uint()))
- case reflect.Float32, reflect.Float64:
- key.SetValue(fmt.Sprint(field.Float()))
- case reflectTime:
- key.SetValue(fmt.Sprint(field.Interface().(time.Time).Format(time.RFC3339)))
- case reflect.Slice:
- return reflectSliceWithProperType(key, field, delim)
- default:
- return fmt.Errorf("unsupported type '%s'", t)
- }
- return nil
-}
-
-// CR: copied from encoding/json/encode.go with modifications of time.Time support.
-// TODO: add more test coverage.
-func isEmptyValue(v reflect.Value) bool {
- switch v.Kind() {
- case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
- return v.Len() == 0
- case reflect.Bool:
- return !v.Bool()
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int() == 0
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return v.Uint() == 0
- case reflect.Float32, reflect.Float64:
- return v.Float() == 0
- case reflectTime:
- return v.Interface().(time.Time).IsZero()
- case reflect.Interface, reflect.Ptr:
- return v.IsNil()
- }
- return false
-}
-
-func (s *Section) reflectFrom(val reflect.Value) error {
- if val.Kind() == reflect.Ptr {
- val = val.Elem()
- }
- typ := val.Type()
-
- for i := 0; i < typ.NumField(); i++ {
- field := val.Field(i)
- tpField := typ.Field(i)
-
- tag := tpField.Tag.Get("ini")
- if tag == "-" {
- continue
- }
-
- opts := strings.SplitN(tag, ",", 2)
- if len(opts) == 2 && opts[1] == "omitempty" && isEmptyValue(field) {
- continue
- }
-
- fieldName := s.parseFieldName(tpField.Name, opts[0])
- if len(fieldName) == 0 || !field.CanSet() {
- continue
- }
-
- if (tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous) ||
- (tpField.Type.Kind() == reflect.Struct && tpField.Type.Name() != "Time") {
- // Note: The only error here is section doesn't exist.
- sec, err := s.f.GetSection(fieldName)
- if err != nil {
- // Note: fieldName can never be empty here, ignore error.
- sec, _ = s.f.NewSection(fieldName)
- }
- if err = sec.reflectFrom(field); err != nil {
- return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
- }
- continue
- }
-
- // Note: Same reason as secion.
- key, err := s.GetKey(fieldName)
- if err != nil {
- key, _ = s.NewKey(fieldName, "")
- }
- if err = reflectWithProperType(tpField.Type, key, field, parseDelim(tpField.Tag.Get("delim"))); err != nil {
- return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
- }
-
- }
- return nil
-}
-
-// ReflectFrom reflects secion from given struct.
-func (s *Section) ReflectFrom(v interface{}) error {
- typ := reflect.TypeOf(v)
- val := reflect.ValueOf(v)
- if typ.Kind() == reflect.Ptr {
- typ = typ.Elem()
- val = val.Elem()
- } else {
- return errors.New("cannot reflect from non-pointer struct")
- }
-
- return s.reflectFrom(val)
-}
-
-// ReflectFrom reflects file from given struct.
-func (f *File) ReflectFrom(v interface{}) error {
- return f.Section("").ReflectFrom(v)
-}
-
-// ReflectFrom reflects data sources from given struct with name mapper.
-func ReflectFromWithMapper(cfg *File, v interface{}, mapper NameMapper) error {
- cfg.NameMapper = mapper
- return cfg.ReflectFrom(v)
-}
-
-// ReflectFrom reflects data sources from given struct.
-func ReflectFrom(cfg *File, v interface{}) error {
- return ReflectFromWithMapper(cfg, v, nil)
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct_test.go
deleted file mode 100644
index 7237715a..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/struct_test.go
+++ /dev/null
@@ -1,337 +0,0 @@
-// Copyright 2014 Unknwon
-//
-// Licensed under the Apache License, Version 2.0 (the "License"): you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
-
-package ini
-
-import (
- "bytes"
- "fmt"
- "strings"
- "testing"
- "time"
-
- . "github.com/smartystreets/goconvey/convey"
-)
-
-type testNested struct {
- Cities []string `delim:"|"`
- Visits []time.Time
- Years []int
- Numbers []int64
- Ages []uint
- Populations []uint64
- Coordinates []float64
- Note string
- Unused int `ini:"-"`
-}
-
-type testEmbeded struct {
- GPA float64
-}
-
-type testStruct struct {
- Name string `ini:"NAME"`
- Age int
- Male bool
- Money float64
- Born time.Time
- Time time.Duration `ini:"Duration"`
- Others testNested
- *testEmbeded `ini:"grade"`
- Unused int `ini:"-"`
- Unsigned uint
- Omitted bool `ini:"omitthis,omitempty"`
- Shadows []string `ini:",,allowshadow"`
- ShadowInts []int `ini:"Shadows,,allowshadow"`
-}
-
-const _CONF_DATA_STRUCT = `
-NAME = Unknwon
-Age = 21
-Male = true
-Money = 1.25
-Born = 1993-10-07T20:17:05Z
-Duration = 2h45m
-Unsigned = 3
-omitthis = true
-Shadows = 1, 2
-Shadows = 3, 4
-
-[Others]
-Cities = HangZhou|Boston
-Visits = 1993-10-07T20:17:05Z, 1993-10-07T20:17:05Z
-Years = 1993,1994
-Numbers = 10010,10086
-Ages = 18,19
-Populations = 12345678,98765432
-Coordinates = 192.168,10.11
-Note = Hello world!
-
-[grade]
-GPA = 2.8
-
-[foo.bar]
-Here = there
-When = then
-`
-
-type unsupport struct {
- Byte byte
-}
-
-type unsupport2 struct {
- Others struct {
- Cities byte
- }
-}
-
-type unsupport3 struct {
- Cities byte
-}
-
-type unsupport4 struct {
- *unsupport3 `ini:"Others"`
-}
-
-type defaultValue struct {
- Name string
- Age int
- Male bool
- Money float64
- Born time.Time
- Cities []string
-}
-
-type fooBar struct {
- Here, When string
-}
-
-const _INVALID_DATA_CONF_STRUCT = `
-Name =
-Age = age
-Male = 123
-Money = money
-Born = nil
-Cities =
-`
-
-func Test_Struct(t *testing.T) {
- Convey("Map to struct", t, func() {
- Convey("Map file to struct", func() {
- ts := new(testStruct)
- So(MapTo(ts, []byte(_CONF_DATA_STRUCT)), ShouldBeNil)
-
- So(ts.Name, ShouldEqual, "Unknwon")
- So(ts.Age, ShouldEqual, 21)
- So(ts.Male, ShouldBeTrue)
- So(ts.Money, ShouldEqual, 1.25)
- So(ts.Unsigned, ShouldEqual, 3)
-
- t, err := time.Parse(time.RFC3339, "1993-10-07T20:17:05Z")
- So(err, ShouldBeNil)
- So(ts.Born.String(), ShouldEqual, t.String())
-
- dur, err := time.ParseDuration("2h45m")
- So(err, ShouldBeNil)
- So(ts.Time.Seconds(), ShouldEqual, dur.Seconds())
-
- So(strings.Join(ts.Others.Cities, ","), ShouldEqual, "HangZhou,Boston")
- So(ts.Others.Visits[0].String(), ShouldEqual, t.String())
- So(fmt.Sprint(ts.Others.Years), ShouldEqual, "[1993 1994]")
- So(fmt.Sprint(ts.Others.Numbers), ShouldEqual, "[10010 10086]")
- So(fmt.Sprint(ts.Others.Ages), ShouldEqual, "[18 19]")
- So(fmt.Sprint(ts.Others.Populations), ShouldEqual, "[12345678 98765432]")
- So(fmt.Sprint(ts.Others.Coordinates), ShouldEqual, "[192.168 10.11]")
- So(ts.Others.Note, ShouldEqual, "Hello world!")
- So(ts.testEmbeded.GPA, ShouldEqual, 2.8)
- })
-
- Convey("Map section to struct", func() {
- foobar := new(fooBar)
- f, err := Load([]byte(_CONF_DATA_STRUCT))
- So(err, ShouldBeNil)
-
- So(f.Section("foo.bar").MapTo(foobar), ShouldBeNil)
- So(foobar.Here, ShouldEqual, "there")
- So(foobar.When, ShouldEqual, "then")
- })
-
- Convey("Map to non-pointer struct", func() {
- cfg, err := Load([]byte(_CONF_DATA_STRUCT))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- So(cfg.MapTo(testStruct{}), ShouldNotBeNil)
- })
-
- Convey("Map to unsupported type", func() {
- cfg, err := Load([]byte(_CONF_DATA_STRUCT))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- cfg.NameMapper = func(raw string) string {
- if raw == "Byte" {
- return "NAME"
- }
- return raw
- }
- So(cfg.MapTo(&unsupport{}), ShouldNotBeNil)
- So(cfg.MapTo(&unsupport2{}), ShouldNotBeNil)
- So(cfg.MapTo(&unsupport4{}), ShouldNotBeNil)
- })
-
- Convey("Map to omitempty field", func() {
- ts := new(testStruct)
- So(MapTo(ts, []byte(_CONF_DATA_STRUCT)), ShouldBeNil)
-
- So(ts.Omitted, ShouldEqual, true)
- })
-
- Convey("Map with shadows", func() {
- cfg, err := LoadSources(LoadOptions{AllowShadows: true}, []byte(_CONF_DATA_STRUCT))
- So(err, ShouldBeNil)
- ts := new(testStruct)
- So(cfg.MapTo(ts), ShouldBeNil)
-
- So(strings.Join(ts.Shadows, " "), ShouldEqual, "1 2 3 4")
- So(fmt.Sprintf("%v", ts.ShadowInts), ShouldEqual, "[1 2 3 4]")
- })
-
- Convey("Map from invalid data source", func() {
- So(MapTo(&testStruct{}, "hi"), ShouldNotBeNil)
- })
-
- Convey("Map to wrong types and gain default values", func() {
- cfg, err := Load([]byte(_INVALID_DATA_CONF_STRUCT))
- So(err, ShouldBeNil)
-
- t, err := time.Parse(time.RFC3339, "1993-10-07T20:17:05Z")
- So(err, ShouldBeNil)
- dv := &defaultValue{"Joe", 10, true, 1.25, t, []string{"HangZhou", "Boston"}}
- So(cfg.MapTo(dv), ShouldBeNil)
- So(dv.Name, ShouldEqual, "Joe")
- So(dv.Age, ShouldEqual, 10)
- So(dv.Male, ShouldBeTrue)
- So(dv.Money, ShouldEqual, 1.25)
- So(dv.Born.String(), ShouldEqual, t.String())
- So(strings.Join(dv.Cities, ","), ShouldEqual, "HangZhou,Boston")
- })
- })
-
- Convey("Reflect from struct", t, func() {
- type Embeded struct {
- Dates []time.Time `delim:"|"`
- Places []string
- Years []int
- Numbers []int64
- Ages []uint
- Populations []uint64
- Coordinates []float64
- None []int
- }
- type Author struct {
- Name string `ini:"NAME"`
- Male bool
- Age int
- Height uint
- GPA float64
- Date time.Time
- NeverMind string `ini:"-"`
- *Embeded `ini:"infos"`
- }
-
- t, err := time.Parse(time.RFC3339, "1993-10-07T20:17:05Z")
- So(err, ShouldBeNil)
- a := &Author{"Unknwon", true, 21, 100, 2.8, t, "",
- &Embeded{
- []time.Time{t, t},
- []string{"HangZhou", "Boston"},
- []int{1993, 1994},
- []int64{10010, 10086},
- []uint{18, 19},
- []uint64{12345678, 98765432},
- []float64{192.168, 10.11},
- []int{},
- }}
- cfg := Empty()
- So(ReflectFrom(cfg, a), ShouldBeNil)
-
- var buf bytes.Buffer
- _, err = cfg.WriteTo(&buf)
- So(err, ShouldBeNil)
- So(buf.String(), ShouldEqual, `NAME = Unknwon
-Male = true
-Age = 21
-Height = 100
-GPA = 2.8
-Date = 1993-10-07T20:17:05Z
-
-[infos]
-Dates = 1993-10-07T20:17:05Z|1993-10-07T20:17:05Z
-Places = HangZhou,Boston
-Years = 1993,1994
-Numbers = 10010,10086
-Ages = 18,19
-Populations = 12345678,98765432
-Coordinates = 192.168,10.11
-None =
-
-`)
-
- Convey("Reflect from non-point struct", func() {
- So(ReflectFrom(cfg, Author{}), ShouldNotBeNil)
- })
-
- Convey("Reflect from struct with omitempty", func() {
- cfg := Empty()
- type SpecialStruct struct {
- FirstName string `ini:"first_name"`
- LastName string `ini:"last_name"`
- JustOmitMe string `ini:"omitempty"`
- LastLogin time.Time `ini:"last_login,omitempty"`
- LastLogin2 time.Time `ini:",omitempty"`
- NotEmpty int `ini:"omitempty"`
- }
-
- So(ReflectFrom(cfg, &SpecialStruct{FirstName: "John", LastName: "Doe", NotEmpty: 9}), ShouldBeNil)
-
- var buf bytes.Buffer
- _, err = cfg.WriteTo(&buf)
- So(buf.String(), ShouldEqual, `first_name = John
-last_name = Doe
-omitempty = 9
-
-`)
- })
- })
-}
-
-type testMapper struct {
- PackageName string
-}
-
-func Test_NameGetter(t *testing.T) {
- Convey("Test name mappers", t, func() {
- So(MapToWithMapper(&testMapper{}, TitleUnderscore, []byte("packag_name=ini")), ShouldBeNil)
-
- cfg, err := Load([]byte("PACKAGE_NAME=ini"))
- So(err, ShouldBeNil)
- So(cfg, ShouldNotBeNil)
-
- cfg.NameMapper = AllCapsUnderscore
- tg := new(testMapper)
- So(cfg.MapTo(tg), ShouldBeNil)
- So(tg.PackageName, ShouldEqual, "ini")
- })
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-BE-BOM.ini b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-BE-BOM.ini
deleted file mode 100644
index c8bf82c8..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-BE-BOM.ini
+++ /dev/null
Binary files differ
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-LE-BOM.ini b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-LE-BOM.ini
deleted file mode 100644
index 27f62186..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-16-LE-BOM.ini
+++ /dev/null
Binary files differ
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-8-BOM.ini b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-8-BOM.ini
deleted file mode 100644
index 2ed0ac1d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/UTF-8-BOM.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[author]
-E-MAIL = u@gogs.io \ No newline at end of file
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/aicc.ini b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/aicc.ini
deleted file mode 100644
index 59a61970..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/aicc.ini
+++ /dev/null
@@ -1,11 +0,0 @@
-[Core]
- Lesson_Location = 87
-Lesson_Status = C
- Score = 3
-Time = 00:02:30
-
-[CORE_LESSON]
-my lesson state data – 1111111111111111111000000000000000001110000
-111111111111111111100000000000111000000000 – end my lesson state data
-[COMMENTS]
-<1><L.Slide#2> This slide has the fuel listed in the wrong units <e.1>
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/conf.ini b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/conf.ini
deleted file mode 100644
index f8e7ec89..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/go-ini/ini/testdata/conf.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[author]
-E-MAIL = u@gogs.io \ No newline at end of file
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.gitignore b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.gitignore
deleted file mode 100644
index 531fcc11..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-jpgo
-jmespath-fuzz.zip
-cpu.out
-go-jmespath.test
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.travis.yml b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.travis.yml
deleted file mode 100644
index 1f980775..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/.travis.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-language: go
-
-sudo: false
-
-go:
- - 1.4
-
-install: go get -v -t ./...
-script: make test
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/LICENSE b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/LICENSE
deleted file mode 100644
index b03310a9..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2015 James Saryerwinnie
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/Makefile b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/Makefile
deleted file mode 100644
index a828d284..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/Makefile
+++ /dev/null
@@ -1,44 +0,0 @@
-
-CMD = jpgo
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " test to run all the tests"
- @echo " build to build the library and jp executable"
- @echo " generate to run codegen"
-
-
-generate:
- go generate ./...
-
-build:
- rm -f $(CMD)
- go build ./...
- rm -f cmd/$(CMD)/$(CMD) && cd cmd/$(CMD)/ && go build ./...
- mv cmd/$(CMD)/$(CMD) .
-
-test:
- go test -v ./...
-
-check:
- go vet ./...
- @echo "golint ./..."
- @lint=`golint ./...`; \
- lint=`echo "$$lint" | grep -v "astnodetype_string.go" | grep -v "toktype_string.go"`; \
- echo "$$lint"; \
- if [ "$$lint" != "" ]; then exit 1; fi
-
-htmlc:
- go test -coverprofile="/tmp/jpcov" && go tool cover -html="/tmp/jpcov" && unlink /tmp/jpcov
-
-buildfuzz:
- go-fuzz-build github.com/jmespath/go-jmespath/fuzz
-
-fuzz: buildfuzz
- go-fuzz -bin=./jmespath-fuzz.zip -workdir=fuzz/testdata
-
-bench:
- go test -bench . -cpuprofile cpu.out
-
-pprof-cpu:
- go tool pprof ./go-jmespath.test ./cpu.out
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/README.md b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/README.md
deleted file mode 100644
index 187ef676..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# go-jmespath - A JMESPath implementation in Go
-
-[![Build Status](https://img.shields.io/travis/jmespath/go-jmespath.svg)](https://travis-ci.org/jmespath/go-jmespath)
-
-
-
-See http://jmespath.org for more info.
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api.go
deleted file mode 100644
index 9cfa988b..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api.go
+++ /dev/null
@@ -1,49 +0,0 @@
-package jmespath
-
-import "strconv"
-
-// JmesPath is the epresentation of a compiled JMES path query. A JmesPath is
-// safe for concurrent use by multiple goroutines.
-type JMESPath struct {
- ast ASTNode
- intr *treeInterpreter
-}
-
-// Compile parses a JMESPath expression and returns, if successful, a JMESPath
-// object that can be used to match against data.
-func Compile(expression string) (*JMESPath, error) {
- parser := NewParser()
- ast, err := parser.Parse(expression)
- if err != nil {
- return nil, err
- }
- jmespath := &JMESPath{ast: ast, intr: newInterpreter()}
- return jmespath, nil
-}
-
-// MustCompile is like Compile but panics if the expression cannot be parsed.
-// It simplifies safe initialization of global variables holding compiled
-// JMESPaths.
-func MustCompile(expression string) *JMESPath {
- jmespath, err := Compile(expression)
- if err != nil {
- panic(`jmespath: Compile(` + strconv.Quote(expression) + `): ` + err.Error())
- }
- return jmespath
-}
-
-// Search evaluates a JMESPath expression against input data and returns the result.
-func (jp *JMESPath) Search(data interface{}) (interface{}, error) {
- return jp.intr.Execute(jp.ast, data)
-}
-
-// Search evaluates a JMESPath expression against input data and returns the result.
-func Search(expression string, data interface{}) (interface{}, error) {
- intr := newInterpreter()
- parser := NewParser()
- ast, err := parser.Parse(expression)
- if err != nil {
- return nil, err
- }
- return intr.Execute(ast, data)
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api_test.go
deleted file mode 100644
index b0b106d3..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/api_test.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package jmespath
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestValidPrecompiledExpressionSearches(t *testing.T) {
- assert := assert.New(t)
- data := make(map[string]interface{})
- data["foo"] = "bar"
- precompiled, err := Compile("foo")
- assert.Nil(err)
- result, err := precompiled.Search(data)
- assert.Nil(err)
- assert.Equal("bar", result)
-}
-
-func TestInvalidPrecompileErrors(t *testing.T) {
- assert := assert.New(t)
- _, err := Compile("not a valid expression")
- assert.NotNil(err)
-}
-
-func TestInvalidMustCompilePanics(t *testing.T) {
- defer func() {
- r := recover()
- assert.NotNil(t, r)
- }()
- MustCompile("not a valid expression")
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/astnodetype_string.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/astnodetype_string.go
deleted file mode 100644
index 1cd2d239..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/astnodetype_string.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// generated by stringer -type astNodeType; DO NOT EDIT
-
-package jmespath
-
-import "fmt"
-
-const _astNodeType_name = "ASTEmptyASTComparatorASTCurrentNodeASTExpRefASTFunctionExpressionASTFieldASTFilterProjectionASTFlattenASTIdentityASTIndexASTIndexExpressionASTKeyValPairASTLiteralASTMultiSelectHashASTMultiSelectListASTOrExpressionASTAndExpressionASTNotExpressionASTPipeASTProjectionASTSubexpressionASTSliceASTValueProjection"
-
-var _astNodeType_index = [...]uint16{0, 8, 21, 35, 44, 65, 73, 92, 102, 113, 121, 139, 152, 162, 180, 198, 213, 229, 245, 252, 265, 281, 289, 307}
-
-func (i astNodeType) String() string {
- if i < 0 || i >= astNodeType(len(_astNodeType_index)-1) {
- return fmt.Sprintf("astNodeType(%d)", i)
- }
- return _astNodeType_name[_astNodeType_index[i]:_astNodeType_index[i+1]]
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/basic.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/basic.json
deleted file mode 100644
index d550e969..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/basic.json
+++ /dev/null
@@ -1,96 +0,0 @@
-[{
- "given":
- {"foo": {"bar": {"baz": "correct"}}},
- "cases": [
- {
- "expression": "foo",
- "result": {"bar": {"baz": "correct"}}
- },
- {
- "expression": "foo.bar",
- "result": {"baz": "correct"}
- },
- {
- "expression": "foo.bar.baz",
- "result": "correct"
- },
- {
- "expression": "foo\n.\nbar\n.baz",
- "result": "correct"
- },
- {
- "expression": "foo.bar.baz.bad",
- "result": null
- },
- {
- "expression": "foo.bar.bad",
- "result": null
- },
- {
- "expression": "foo.bad",
- "result": null
- },
- {
- "expression": "bad",
- "result": null
- },
- {
- "expression": "bad.morebad.morebad",
- "result": null
- }
- ]
-},
-{
- "given":
- {"foo": {"bar": ["one", "two", "three"]}},
- "cases": [
- {
- "expression": "foo",
- "result": {"bar": ["one", "two", "three"]}
- },
- {
- "expression": "foo.bar",
- "result": ["one", "two", "three"]
- }
- ]
-},
-{
- "given": ["one", "two", "three"],
- "cases": [
- {
- "expression": "one",
- "result": null
- },
- {
- "expression": "two",
- "result": null
- },
- {
- "expression": "three",
- "result": null
- },
- {
- "expression": "one.two",
- "result": null
- }
- ]
-},
-{
- "given":
- {"foo": {"1": ["one", "two", "three"], "-1": "bar"}},
- "cases": [
- {
- "expression": "foo.\"1\"",
- "result": ["one", "two", "three"]
- },
- {
- "expression": "foo.\"1\"[0]",
- "result": "one"
- },
- {
- "expression": "foo.\"-1\"",
- "result": "bar"
- }
- ]
-}
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/boolean.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/boolean.json
deleted file mode 100644
index e3fa196b..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/boolean.json
+++ /dev/null
@@ -1,257 +0,0 @@
-[
- {
- "given": {
- "outer": {
- "foo": "foo",
- "bar": "bar",
- "baz": "baz"
- }
- },
- "cases": [
- {
- "expression": "outer.foo || outer.bar",
- "result": "foo"
- },
- {
- "expression": "outer.foo||outer.bar",
- "result": "foo"
- },
- {
- "expression": "outer.bar || outer.baz",
- "result": "bar"
- },
- {
- "expression": "outer.bar||outer.baz",
- "result": "bar"
- },
- {
- "expression": "outer.bad || outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.bad||outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.foo || outer.bad",
- "result": "foo"
- },
- {
- "expression": "outer.foo||outer.bad",
- "result": "foo"
- },
- {
- "expression": "outer.bad || outer.alsobad",
- "result": null
- },
- {
- "expression": "outer.bad||outer.alsobad",
- "result": null
- }
- ]
- },
- {
- "given": {
- "outer": {
- "foo": "foo",
- "bool": false,
- "empty_list": [],
- "empty_string": ""
- }
- },
- "cases": [
- {
- "expression": "outer.empty_string || outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.nokey || outer.bool || outer.empty_list || outer.empty_string || outer.foo",
- "result": "foo"
- }
- ]
- },
- {
- "given": {
- "True": true,
- "False": false,
- "Number": 5,
- "EmptyList": [],
- "Zero": 0
- },
- "cases": [
- {
- "expression": "True && False",
- "result": false
- },
- {
- "expression": "False && True",
- "result": false
- },
- {
- "expression": "True && True",
- "result": true
- },
- {
- "expression": "False && False",
- "result": false
- },
- {
- "expression": "True && Number",
- "result": 5
- },
- {
- "expression": "Number && True",
- "result": true
- },
- {
- "expression": "Number && False",
- "result": false
- },
- {
- "expression": "Number && EmptyList",
- "result": []
- },
- {
- "expression": "Number && True",
- "result": true
- },
- {
- "expression": "EmptyList && True",
- "result": []
- },
- {
- "expression": "EmptyList && False",
- "result": []
- },
- {
- "expression": "True || False",
- "result": true
- },
- {
- "expression": "True || True",
- "result": true
- },
- {
- "expression": "False || True",
- "result": true
- },
- {
- "expression": "False || False",
- "result": false
- },
- {
- "expression": "Number || EmptyList",
- "result": 5
- },
- {
- "expression": "Number || True",
- "result": 5
- },
- {
- "expression": "Number || True && False",
- "result": 5
- },
- {
- "expression": "(Number || True) && False",
- "result": false
- },
- {
- "expression": "Number || (True && False)",
- "result": 5
- },
- {
- "expression": "!True",
- "result": false
- },
- {
- "expression": "!False",
- "result": true
- },
- {
- "expression": "!Number",
- "result": false
- },
- {
- "expression": "!EmptyList",
- "result": true
- },
- {
- "expression": "True && !False",
- "result": true
- },
- {
- "expression": "True && !EmptyList",
- "result": true
- },
- {
- "expression": "!False && !EmptyList",
- "result": true
- },
- {
- "expression": "!(True && False)",
- "result": true
- },
- {
- "expression": "!Zero",
- "result": false
- },
- {
- "expression": "!!Zero",
- "result": true
- }
- ]
- },
- {
- "given": {
- "one": 1,
- "two": 2,
- "three": 3
- },
- "cases": [
- {
- "expression": "one < two",
- "result": true
- },
- {
- "expression": "one <= two",
- "result": true
- },
- {
- "expression": "one == one",
- "result": true
- },
- {
- "expression": "one == two",
- "result": false
- },
- {
- "expression": "one > two",
- "result": false
- },
- {
- "expression": "one >= two",
- "result": false
- },
- {
- "expression": "one != two",
- "result": true
- },
- {
- "expression": "one < two && three > one",
- "result": true
- },
- {
- "expression": "one < two || three > one",
- "result": true
- },
- {
- "expression": "one < two || three < one",
- "result": true
- },
- {
- "expression": "two < one || three < one",
- "result": false
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/current.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/current.json
deleted file mode 100644
index 0c26248d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/current.json
+++ /dev/null
@@ -1,25 +0,0 @@
-[
- {
- "given": {
- "foo": [{"name": "a"}, {"name": "b"}],
- "bar": {"baz": "qux"}
- },
- "cases": [
- {
- "expression": "@",
- "result": {
- "foo": [{"name": "a"}, {"name": "b"}],
- "bar": {"baz": "qux"}
- }
- },
- {
- "expression": "@.bar",
- "result": {"baz": "qux"}
- },
- {
- "expression": "@.foo[0]",
- "result": {"name": "a"}
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/escape.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/escape.json
deleted file mode 100644
index 4a62d951..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/escape.json
+++ /dev/null
@@ -1,46 +0,0 @@
-[{
- "given": {
- "foo.bar": "dot",
- "foo bar": "space",
- "foo\nbar": "newline",
- "foo\"bar": "doublequote",
- "c:\\\\windows\\path": "windows",
- "/unix/path": "unix",
- "\"\"\"": "threequotes",
- "bar": {"baz": "qux"}
- },
- "cases": [
- {
- "expression": "\"foo.bar\"",
- "result": "dot"
- },
- {
- "expression": "\"foo bar\"",
- "result": "space"
- },
- {
- "expression": "\"foo\\nbar\"",
- "result": "newline"
- },
- {
- "expression": "\"foo\\\"bar\"",
- "result": "doublequote"
- },
- {
- "expression": "\"c:\\\\\\\\windows\\\\path\"",
- "result": "windows"
- },
- {
- "expression": "\"/unix/path\"",
- "result": "unix"
- },
- {
- "expression": "\"\\\"\\\"\\\"\"",
- "result": "threequotes"
- },
- {
- "expression": "\"bar\".\"baz\"",
- "result": "qux"
- }
- ]
-}]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/filters.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/filters.json
deleted file mode 100644
index 5b9f52b1..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/filters.json
+++ /dev/null
@@ -1,468 +0,0 @@
-[
- {
- "given": {"foo": [{"name": "a"}, {"name": "b"}]},
- "cases": [
- {
- "comment": "Matching a literal",
- "expression": "foo[?name == 'a']",
- "result": [{"name": "a"}]
- }
- ]
- },
- {
- "given": {"foo": [0, 1], "bar": [2, 3]},
- "cases": [
- {
- "comment": "Matching a literal",
- "expression": "*[?[0] == `0`]",
- "result": [[], []]
- }
- ]
- },
- {
- "given": {"foo": [{"first": "foo", "last": "bar"},
- {"first": "foo", "last": "foo"},
- {"first": "foo", "last": "baz"}]},
- "cases": [
- {
- "comment": "Matching an expression",
- "expression": "foo[?first == last]",
- "result": [{"first": "foo", "last": "foo"}]
- },
- {
- "comment": "Verify projection created from filter",
- "expression": "foo[?first == last].first",
- "result": ["foo"]
- }
- ]
- },
- {
- "given": {"foo": [{"age": 20},
- {"age": 25},
- {"age": 30}]},
- "cases": [
- {
- "comment": "Greater than with a number",
- "expression": "foo[?age > `25`]",
- "result": [{"age": 30}]
- },
- {
- "expression": "foo[?age >= `25`]",
- "result": [{"age": 25}, {"age": 30}]
- },
- {
- "comment": "Greater than with a number",
- "expression": "foo[?age > `30`]",
- "result": []
- },
- {
- "comment": "Greater than with a number",
- "expression": "foo[?age < `25`]",
- "result": [{"age": 20}]
- },
- {
- "comment": "Greater than with a number",
- "expression": "foo[?age <= `25`]",
- "result": [{"age": 20}, {"age": 25}]
- },
- {
- "comment": "Greater than with a number",
- "expression": "foo[?age < `20`]",
- "result": []
- },
- {
- "expression": "foo[?age == `20`]",
- "result": [{"age": 20}]
- },
- {
- "expression": "foo[?age != `20`]",
- "result": [{"age": 25}, {"age": 30}]
- }
- ]
- },
- {
- "given": {"foo": [{"top": {"name": "a"}},
- {"top": {"name": "b"}}]},
- "cases": [
- {
- "comment": "Filter with subexpression",
- "expression": "foo[?top.name == 'a']",
- "result": [{"top": {"name": "a"}}]
- }
- ]
- },
- {
- "given": {"foo": [{"top": {"first": "foo", "last": "bar"}},
- {"top": {"first": "foo", "last": "foo"}},
- {"top": {"first": "foo", "last": "baz"}}]},
- "cases": [
- {
- "comment": "Matching an expression",
- "expression": "foo[?top.first == top.last]",
- "result": [{"top": {"first": "foo", "last": "foo"}}]
- },
- {
- "comment": "Matching a JSON array",
- "expression": "foo[?top == `{\"first\": \"foo\", \"last\": \"bar\"}`]",
- "result": [{"top": {"first": "foo", "last": "bar"}}]
- }
- ]
- },
- {
- "given": {"foo": [
- {"key": true},
- {"key": false},
- {"key": 0},
- {"key": 1},
- {"key": [0]},
- {"key": {"bar": [0]}},
- {"key": null},
- {"key": [1]},
- {"key": {"a":2}}
- ]},
- "cases": [
- {
- "expression": "foo[?key == `true`]",
- "result": [{"key": true}]
- },
- {
- "expression": "foo[?key == `false`]",
- "result": [{"key": false}]
- },
- {
- "expression": "foo[?key == `0`]",
- "result": [{"key": 0}]
- },
- {
- "expression": "foo[?key == `1`]",
- "result": [{"key": 1}]
- },
- {
- "expression": "foo[?key == `[0]`]",
- "result": [{"key": [0]}]
- },
- {
- "expression": "foo[?key == `{\"bar\": [0]}`]",
- "result": [{"key": {"bar": [0]}}]
- },
- {
- "expression": "foo[?key == `null`]",
- "result": [{"key": null}]
- },
- {
- "expression": "foo[?key == `[1]`]",
- "result": [{"key": [1]}]
- },
- {
- "expression": "foo[?key == `{\"a\":2}`]",
- "result": [{"key": {"a":2}}]
- },
- {
- "expression": "foo[?`true` == key]",
- "result": [{"key": true}]
- },
- {
- "expression": "foo[?`false` == key]",
- "result": [{"key": false}]
- },
- {
- "expression": "foo[?`0` == key]",
- "result": [{"key": 0}]
- },
- {
- "expression": "foo[?`1` == key]",
- "result": [{"key": 1}]
- },
- {
- "expression": "foo[?`[0]` == key]",
- "result": [{"key": [0]}]
- },
- {
- "expression": "foo[?`{\"bar\": [0]}` == key]",
- "result": [{"key": {"bar": [0]}}]
- },
- {
- "expression": "foo[?`null` == key]",
- "result": [{"key": null}]
- },
- {
- "expression": "foo[?`[1]` == key]",
- "result": [{"key": [1]}]
- },
- {
- "expression": "foo[?`{\"a\":2}` == key]",
- "result": [{"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `true`]",
- "result": [{"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `false`]",
- "result": [{"key": true}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `0`]",
- "result": [{"key": true}, {"key": false}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `1`]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `null`]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `[1]`]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?key != `{\"a\":2}`]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}]
- },
- {
- "expression": "foo[?`true` != key]",
- "result": [{"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`false` != key]",
- "result": [{"key": true}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`0` != key]",
- "result": [{"key": true}, {"key": false}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`1` != key]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`null` != key]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": [1]}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`[1]` != key]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": {"a":2}}]
- },
- {
- "expression": "foo[?`{\"a\":2}` != key]",
- "result": [{"key": true}, {"key": false}, {"key": 0}, {"key": 1}, {"key": [0]},
- {"key": {"bar": [0]}}, {"key": null}, {"key": [1]}]
- }
- ]
- },
- {
- "given": {"reservations": [
- {"instances": [
- {"foo": 1, "bar": 2}, {"foo": 1, "bar": 3},
- {"foo": 1, "bar": 2}, {"foo": 2, "bar": 1}]}]},
- "cases": [
- {
- "expression": "reservations[].instances[?bar==`1`]",
- "result": [[{"foo": 2, "bar": 1}]]
- },
- {
- "expression": "reservations[*].instances[?bar==`1`]",
- "result": [[{"foo": 2, "bar": 1}]]
- },
- {
- "expression": "reservations[].instances[?bar==`1`][]",
- "result": [{"foo": 2, "bar": 1}]
- }
- ]
- },
- {
- "given": {
- "baz": "other",
- "foo": [
- {"bar": 1}, {"bar": 2}, {"bar": 3}, {"bar": 4}, {"bar": 1, "baz": 2}
- ]
- },
- "cases": [
- {
- "expression": "foo[?bar==`1`].bar[0]",
- "result": []
- }
- ]
- },
- {
- "given": {
- "foo": [
- {"a": 1, "b": {"c": "x"}},
- {"a": 1, "b": {"c": "y"}},
- {"a": 1, "b": {"c": "z"}},
- {"a": 2, "b": {"c": "z"}},
- {"a": 1, "baz": 2}
- ]
- },
- "cases": [
- {
- "expression": "foo[?a==`1`].b.c",
- "result": ["x", "y", "z"]
- }
- ]
- },
- {
- "given": {"foo": [{"name": "a"}, {"name": "b"}, {"name": "c"}]},
- "cases": [
- {
- "comment": "Filter with or expression",
- "expression": "foo[?name == 'a' || name == 'b']",
- "result": [{"name": "a"}, {"name": "b"}]
- },
- {
- "expression": "foo[?name == 'a' || name == 'e']",
- "result": [{"name": "a"}]
- },
- {
- "expression": "foo[?name == 'a' || name == 'b' || name == 'c']",
- "result": [{"name": "a"}, {"name": "b"}, {"name": "c"}]
- }
- ]
- },
- {
- "given": {"foo": [{"a": 1, "b": 2}, {"a": 1, "b": 3}]},
- "cases": [
- {
- "comment": "Filter with and expression",
- "expression": "foo[?a == `1` && b == `2`]",
- "result": [{"a": 1, "b": 2}]
- },
- {
- "expression": "foo[?a == `1` && b == `4`]",
- "result": []
- }
- ]
- },
- {
- "given": {"foo": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]},
- "cases": [
- {
- "comment": "Filter with Or and And expressions",
- "expression": "foo[?c == `3` || a == `1` && b == `4`]",
- "result": [{"a": 1, "b": 2, "c": 3}]
- },
- {
- "expression": "foo[?b == `2` || a == `3` && b == `4`]",
- "result": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]
- },
- {
- "expression": "foo[?a == `3` && b == `4` || b == `2`]",
- "result": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]
- },
- {
- "expression": "foo[?(a == `3` && b == `4`) || b == `2`]",
- "result": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]
- },
- {
- "expression": "foo[?((a == `3` && b == `4`)) || b == `2`]",
- "result": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]
- },
- {
- "expression": "foo[?a == `3` && (b == `4` || b == `2`)]",
- "result": [{"a": 3, "b": 4}]
- },
- {
- "expression": "foo[?a == `3` && ((b == `4` || b == `2`))]",
- "result": [{"a": 3, "b": 4}]
- }
- ]
- },
- {
- "given": {"foo": [{"a": 1, "b": 2, "c": 3}, {"a": 3, "b": 4}]},
- "cases": [
- {
- "comment": "Verify precedence of or/and expressions",
- "expression": "foo[?a == `1` || b ==`2` && c == `5`]",
- "result": [{"a": 1, "b": 2, "c": 3}]
- },
- {
- "comment": "Parentheses can alter precedence",
- "expression": "foo[?(a == `1` || b ==`2`) && c == `5`]",
- "result": []
- },
- {
- "comment": "Not expressions combined with and/or",
- "expression": "foo[?!(a == `1` || b ==`2`)]",
- "result": [{"a": 3, "b": 4}]
- }
- ]
- },
- {
- "given": {
- "foo": [
- {"key": true},
- {"key": false},
- {"key": []},
- {"key": {}},
- {"key": [0]},
- {"key": {"a": "b"}},
- {"key": 0},
- {"key": 1},
- {"key": null},
- {"notkey": true}
- ]
- },
- "cases": [
- {
- "comment": "Unary filter expression",
- "expression": "foo[?key]",
- "result": [
- {"key": true}, {"key": [0]}, {"key": {"a": "b"}},
- {"key": 0}, {"key": 1}
- ]
- },
- {
- "comment": "Unary not filter expression",
- "expression": "foo[?!key]",
- "result": [
- {"key": false}, {"key": []}, {"key": {}},
- {"key": null}, {"notkey": true}
- ]
- },
- {
- "comment": "Equality with null RHS",
- "expression": "foo[?key == `null`]",
- "result": [
- {"key": null}, {"notkey": true}
- ]
- }
- ]
- },
- {
- "given": {
- "foo": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- "cases": [
- {
- "comment": "Using @ in a filter expression",
- "expression": "foo[?@ < `5`]",
- "result": [0, 1, 2, 3, 4]
- },
- {
- "comment": "Using @ in a filter expression",
- "expression": "foo[?`5` > @]",
- "result": [0, 1, 2, 3, 4]
- },
- {
- "comment": "Using @ in a filter expression",
- "expression": "foo[?@ == @]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/functions.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/functions.json
deleted file mode 100644
index 8b8db363..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/functions.json
+++ /dev/null
@@ -1,825 +0,0 @@
-[{
- "given":
- {
- "foo": -1,
- "zero": 0,
- "numbers": [-1, 3, 4, 5],
- "array": [-1, 3, 4, 5, "a", "100"],
- "strings": ["a", "b", "c"],
- "decimals": [1.01, 1.2, -1.5],
- "str": "Str",
- "false": false,
- "empty_list": [],
- "empty_hash": {},
- "objects": {"foo": "bar", "bar": "baz"},
- "null_key": null
- },
- "cases": [
- {
- "expression": "abs(foo)",
- "result": 1
- },
- {
- "expression": "abs(foo)",
- "result": 1
- },
- {
- "expression": "abs(str)",
- "error": "invalid-type"
- },
- {
- "expression": "abs(array[1])",
- "result": 3
- },
- {
- "expression": "abs(array[1])",
- "result": 3
- },
- {
- "expression": "abs(`false`)",
- "error": "invalid-type"
- },
- {
- "expression": "abs(`-24`)",
- "result": 24
- },
- {
- "expression": "abs(`-24`)",
- "result": 24
- },
- {
- "expression": "abs(`1`, `2`)",
- "error": "invalid-arity"
- },
- {
- "expression": "abs()",
- "error": "invalid-arity"
- },
- {
- "expression": "unknown_function(`1`, `2`)",
- "error": "unknown-function"
- },
- {
- "expression": "avg(numbers)",
- "result": 2.75
- },
- {
- "expression": "avg(array)",
- "error": "invalid-type"
- },
- {
- "expression": "avg('abc')",
- "error": "invalid-type"
- },
- {
- "expression": "avg(foo)",
- "error": "invalid-type"
- },
- {
- "expression": "avg(@)",
- "error": "invalid-type"
- },
- {
- "expression": "avg(strings)",
- "error": "invalid-type"
- },
- {
- "expression": "ceil(`1.2`)",
- "result": 2
- },
- {
- "expression": "ceil(decimals[0])",
- "result": 2
- },
- {
- "expression": "ceil(decimals[1])",
- "result": 2
- },
- {
- "expression": "ceil(decimals[2])",
- "result": -1
- },
- {
- "expression": "ceil('string')",
- "error": "invalid-type"
- },
- {
- "expression": "contains('abc', 'a')",
- "result": true
- },
- {
- "expression": "contains('abc', 'd')",
- "result": false
- },
- {
- "expression": "contains(`false`, 'd')",
- "error": "invalid-type"
- },
- {
- "expression": "contains(strings, 'a')",
- "result": true
- },
- {
- "expression": "contains(decimals, `1.2`)",
- "result": true
- },
- {
- "expression": "contains(decimals, `false`)",
- "result": false
- },
- {
- "expression": "ends_with(str, 'r')",
- "result": true
- },
- {
- "expression": "ends_with(str, 'tr')",
- "result": true
- },
- {
- "expression": "ends_with(str, 'Str')",
- "result": true
- },
- {
- "expression": "ends_with(str, 'SStr')",
- "result": false
- },
- {
- "expression": "ends_with(str, 'foo')",
- "result": false
- },
- {
- "expression": "ends_with(str, `0`)",
- "error": "invalid-type"
- },
- {
- "expression": "floor(`1.2`)",
- "result": 1
- },
- {
- "expression": "floor('string')",
- "error": "invalid-type"
- },
- {
- "expression": "floor(decimals[0])",
- "result": 1
- },
- {
- "expression": "floor(foo)",
- "result": -1
- },
- {
- "expression": "floor(str)",
- "error": "invalid-type"
- },
- {
- "expression": "length('abc')",
- "result": 3
- },
- {
- "expression": "length('✓foo')",
- "result": 4
- },
- {
- "expression": "length('')",
- "result": 0
- },
- {
- "expression": "length(@)",
- "result": 12
- },
- {
- "expression": "length(strings[0])",
- "result": 1
- },
- {
- "expression": "length(str)",
- "result": 3
- },
- {
- "expression": "length(array)",
- "result": 6
- },
- {
- "expression": "length(objects)",
- "result": 2
- },
- {
- "expression": "length(`false`)",
- "error": "invalid-type"
- },
- {
- "expression": "length(foo)",
- "error": "invalid-type"
- },
- {
- "expression": "length(strings[0])",
- "result": 1
- },
- {
- "expression": "max(numbers)",
- "result": 5
- },
- {
- "expression": "max(decimals)",
- "result": 1.2
- },
- {
- "expression": "max(strings)",
- "result": "c"
- },
- {
- "expression": "max(abc)",
- "error": "invalid-type"
- },
- {
- "expression": "max(array)",
- "error": "invalid-type"
- },
- {
- "expression": "max(decimals)",
- "result": 1.2
- },
- {
- "expression": "max(empty_list)",
- "result": null
- },
- {
- "expression": "merge(`{}`)",
- "result": {}
- },
- {
- "expression": "merge(`{}`, `{}`)",
- "result": {}
- },
- {
- "expression": "merge(`{\"a\": 1}`, `{\"b\": 2}`)",
- "result": {"a": 1, "b": 2}
- },
- {
- "expression": "merge(`{\"a\": 1}`, `{\"a\": 2}`)",
- "result": {"a": 2}
- },
- {
- "expression": "merge(`{\"a\": 1, \"b\": 2}`, `{\"a\": 2, \"c\": 3}`, `{\"d\": 4}`)",
- "result": {"a": 2, "b": 2, "c": 3, "d": 4}
- },
- {
- "expression": "min(numbers)",
- "result": -1
- },
- {
- "expression": "min(decimals)",
- "result": -1.5
- },
- {
- "expression": "min(abc)",
- "error": "invalid-type"
- },
- {
- "expression": "min(array)",
- "error": "invalid-type"
- },
- {
- "expression": "min(empty_list)",
- "result": null
- },
- {
- "expression": "min(decimals)",
- "result": -1.5
- },
- {
- "expression": "min(strings)",
- "result": "a"
- },
- {
- "expression": "type('abc')",
- "result": "string"
- },
- {
- "expression": "type(`1.0`)",
- "result": "number"
- },
- {
- "expression": "type(`2`)",
- "result": "number"
- },
- {
- "expression": "type(`true`)",
- "result": "boolean"
- },
- {
- "expression": "type(`false`)",
- "result": "boolean"
- },
- {
- "expression": "type(`null`)",
- "result": "null"
- },
- {
- "expression": "type(`[0]`)",
- "result": "array"
- },
- {
- "expression": "type(`{\"a\": \"b\"}`)",
- "result": "object"
- },
- {
- "expression": "type(@)",
- "result": "object"
- },
- {
- "expression": "sort(keys(objects))",
- "result": ["bar", "foo"]
- },
- {
- "expression": "keys(foo)",
- "error": "invalid-type"
- },
- {
- "expression": "keys(strings)",
- "error": "invalid-type"
- },
- {
- "expression": "keys(`false`)",
- "error": "invalid-type"
- },
- {
- "expression": "sort(values(objects))",
- "result": ["bar", "baz"]
- },
- {
- "expression": "keys(empty_hash)",
- "result": []
- },
- {
- "expression": "values(foo)",
- "error": "invalid-type"
- },
- {
- "expression": "join(', ', strings)",
- "result": "a, b, c"
- },
- {
- "expression": "join(', ', strings)",
- "result": "a, b, c"
- },
- {
- "expression": "join(',', `[\"a\", \"b\"]`)",
- "result": "a,b"
- },
- {
- "expression": "join(',', `[\"a\", 0]`)",
- "error": "invalid-type"
- },
- {
- "expression": "join(', ', str)",
- "error": "invalid-type"
- },
- {
- "expression": "join('|', strings)",
- "result": "a|b|c"
- },
- {
- "expression": "join(`2`, strings)",
- "error": "invalid-type"
- },
- {
- "expression": "join('|', decimals)",
- "error": "invalid-type"
- },
- {
- "expression": "join('|', decimals[].to_string(@))",
- "result": "1.01|1.2|-1.5"
- },
- {
- "expression": "join('|', empty_list)",
- "result": ""
- },
- {
- "expression": "reverse(numbers)",
- "result": [5, 4, 3, -1]
- },
- {
- "expression": "reverse(array)",
- "result": ["100", "a", 5, 4, 3, -1]
- },
- {
- "expression": "reverse(`[]`)",
- "result": []
- },
- {
- "expression": "reverse('')",
- "result": ""
- },
- {
- "expression": "reverse('hello world')",
- "result": "dlrow olleh"
- },
- {
- "expression": "starts_with(str, 'S')",
- "result": true
- },
- {
- "expression": "starts_with(str, 'St')",
- "result": true
- },
- {
- "expression": "starts_with(str, 'Str')",
- "result": true
- },
- {
- "expression": "starts_with(str, 'String')",
- "result": false
- },
- {
- "expression": "starts_with(str, `0`)",
- "error": "invalid-type"
- },
- {
- "expression": "sum(numbers)",
- "result": 11
- },
- {
- "expression": "sum(decimals)",
- "result": 0.71
- },
- {
- "expression": "sum(array)",
- "error": "invalid-type"
- },
- {
- "expression": "sum(array[].to_number(@))",
- "result": 111
- },
- {
- "expression": "sum(`[]`)",
- "result": 0
- },
- {
- "expression": "to_array('foo')",
- "result": ["foo"]
- },
- {
- "expression": "to_array(`0`)",
- "result": [0]
- },
- {
- "expression": "to_array(objects)",
- "result": [{"foo": "bar", "bar": "baz"}]
- },
- {
- "expression": "to_array(`[1, 2, 3]`)",
- "result": [1, 2, 3]
- },
- {
- "expression": "to_array(false)",
- "result": [false]
- },
- {
- "expression": "to_string('foo')",
- "result": "foo"
- },
- {
- "expression": "to_string(`1.2`)",
- "result": "1.2"
- },
- {
- "expression": "to_string(`[0, 1]`)",
- "result": "[0,1]"
- },
- {
- "expression": "to_number('1.0')",
- "result": 1.0
- },
- {
- "expression": "to_number('1.1')",
- "result": 1.1
- },
- {
- "expression": "to_number('4')",
- "result": 4
- },
- {
- "expression": "to_number('notanumber')",
- "result": null
- },
- {
- "expression": "to_number(`false`)",
- "result": null
- },
- {
- "expression": "to_number(`null`)",
- "result": null
- },
- {
- "expression": "to_number(`[0]`)",
- "result": null
- },
- {
- "expression": "to_number(`{\"foo\": 0}`)",
- "result": null
- },
- {
- "expression": "\"to_string\"(`1.0`)",
- "error": "syntax"
- },
- {
- "expression": "sort(numbers)",
- "result": [-1, 3, 4, 5]
- },
- {
- "expression": "sort(strings)",
- "result": ["a", "b", "c"]
- },
- {
- "expression": "sort(decimals)",
- "result": [-1.5, 1.01, 1.2]
- },
- {
- "expression": "sort(array)",
- "error": "invalid-type"
- },
- {
- "expression": "sort(abc)",
- "error": "invalid-type"
- },
- {
- "expression": "sort(empty_list)",
- "result": []
- },
- {
- "expression": "sort(@)",
- "error": "invalid-type"
- },
- {
- "expression": "not_null(unknown_key, str)",
- "result": "Str"
- },
- {
- "expression": "not_null(unknown_key, foo.bar, empty_list, str)",
- "result": []
- },
- {
- "expression": "not_null(unknown_key, null_key, empty_list, str)",
- "result": []
- },
- {
- "expression": "not_null(all, expressions, are_null)",
- "result": null
- },
- {
- "expression": "not_null()",
- "error": "invalid-arity"
- },
- {
- "description": "function projection on single arg function",
- "expression": "numbers[].to_string(@)",
- "result": ["-1", "3", "4", "5"]
- },
- {
- "description": "function projection on single arg function",
- "expression": "array[].to_number(@)",
- "result": [-1, 3, 4, 5, 100]
- }
- ]
-}, {
- "given":
- {
- "foo": [
- {"b": "b", "a": "a"},
- {"c": "c", "b": "b"},
- {"d": "d", "c": "c"},
- {"e": "e", "d": "d"},
- {"f": "f", "e": "e"}
- ]
- },
- "cases": [
- {
- "description": "function projection on variadic function",
- "expression": "foo[].not_null(f, e, d, c, b, a)",
- "result": ["b", "c", "d", "e", "f"]
- }
- ]
-}, {
- "given":
- {
- "people": [
- {"age": 20, "age_str": "20", "bool": true, "name": "a", "extra": "foo"},
- {"age": 40, "age_str": "40", "bool": false, "name": "b", "extra": "bar"},
- {"age": 30, "age_str": "30", "bool": true, "name": "c"},
- {"age": 50, "age_str": "50", "bool": false, "name": "d"},
- {"age": 10, "age_str": "10", "bool": true, "name": 3}
- ]
- },
- "cases": [
- {
- "description": "sort by field expression",
- "expression": "sort_by(people, &age)",
- "result": [
- {"age": 10, "age_str": "10", "bool": true, "name": 3},
- {"age": 20, "age_str": "20", "bool": true, "name": "a", "extra": "foo"},
- {"age": 30, "age_str": "30", "bool": true, "name": "c"},
- {"age": 40, "age_str": "40", "bool": false, "name": "b", "extra": "bar"},
- {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- ]
- },
- {
- "expression": "sort_by(people, &age_str)",
- "result": [
- {"age": 10, "age_str": "10", "bool": true, "name": 3},
- {"age": 20, "age_str": "20", "bool": true, "name": "a", "extra": "foo"},
- {"age": 30, "age_str": "30", "bool": true, "name": "c"},
- {"age": 40, "age_str": "40", "bool": false, "name": "b", "extra": "bar"},
- {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- ]
- },
- {
- "description": "sort by function expression",
- "expression": "sort_by(people, &to_number(age_str))",
- "result": [
- {"age": 10, "age_str": "10", "bool": true, "name": 3},
- {"age": 20, "age_str": "20", "bool": true, "name": "a", "extra": "foo"},
- {"age": 30, "age_str": "30", "bool": true, "name": "c"},
- {"age": 40, "age_str": "40", "bool": false, "name": "b", "extra": "bar"},
- {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- ]
- },
- {
- "description": "function projection on sort_by function",
- "expression": "sort_by(people, &age)[].name",
- "result": [3, "a", "c", "b", "d"]
- },
- {
- "expression": "sort_by(people, &extra)",
- "error": "invalid-type"
- },
- {
- "expression": "sort_by(people, &bool)",
- "error": "invalid-type"
- },
- {
- "expression": "sort_by(people, &name)",
- "error": "invalid-type"
- },
- {
- "expression": "sort_by(people, name)",
- "error": "invalid-type"
- },
- {
- "expression": "sort_by(people, &age)[].extra",
- "result": ["foo", "bar"]
- },
- {
- "expression": "sort_by(`[]`, &age)",
- "result": []
- },
- {
- "expression": "max_by(people, &age)",
- "result": {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- },
- {
- "expression": "max_by(people, &age_str)",
- "result": {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- },
- {
- "expression": "max_by(people, &bool)",
- "error": "invalid-type"
- },
- {
- "expression": "max_by(people, &extra)",
- "error": "invalid-type"
- },
- {
- "expression": "max_by(people, &to_number(age_str))",
- "result": {"age": 50, "age_str": "50", "bool": false, "name": "d"}
- },
- {
- "expression": "min_by(people, &age)",
- "result": {"age": 10, "age_str": "10", "bool": true, "name": 3}
- },
- {
- "expression": "min_by(people, &age_str)",
- "result": {"age": 10, "age_str": "10", "bool": true, "name": 3}
- },
- {
- "expression": "min_by(people, &bool)",
- "error": "invalid-type"
- },
- {
- "expression": "min_by(people, &extra)",
- "error": "invalid-type"
- },
- {
- "expression": "min_by(people, &to_number(age_str))",
- "result": {"age": 10, "age_str": "10", "bool": true, "name": 3}
- }
- ]
-}, {
- "given":
- {
- "people": [
- {"age": 10, "order": "1"},
- {"age": 10, "order": "2"},
- {"age": 10, "order": "3"},
- {"age": 10, "order": "4"},
- {"age": 10, "order": "5"},
- {"age": 10, "order": "6"},
- {"age": 10, "order": "7"},
- {"age": 10, "order": "8"},
- {"age": 10, "order": "9"},
- {"age": 10, "order": "10"},
- {"age": 10, "order": "11"}
- ]
- },
- "cases": [
- {
- "description": "stable sort order",
- "expression": "sort_by(people, &age)",
- "result": [
- {"age": 10, "order": "1"},
- {"age": 10, "order": "2"},
- {"age": 10, "order": "3"},
- {"age": 10, "order": "4"},
- {"age": 10, "order": "5"},
- {"age": 10, "order": "6"},
- {"age": 10, "order": "7"},
- {"age": 10, "order": "8"},
- {"age": 10, "order": "9"},
- {"age": 10, "order": "10"},
- {"age": 10, "order": "11"}
- ]
- }
- ]
-}, {
- "given":
- {
- "people": [
- {"a": 10, "b": 1, "c": "z"},
- {"a": 10, "b": 2, "c": null},
- {"a": 10, "b": 3},
- {"a": 10, "b": 4, "c": "z"},
- {"a": 10, "b": 5, "c": null},
- {"a": 10, "b": 6},
- {"a": 10, "b": 7, "c": "z"},
- {"a": 10, "b": 8, "c": null},
- {"a": 10, "b": 9}
- ],
- "empty": []
- },
- "cases": [
- {
- "expression": "map(&a, people)",
- "result": [10, 10, 10, 10, 10, 10, 10, 10, 10]
- },
- {
- "expression": "map(&c, people)",
- "result": ["z", null, null, "z", null, null, "z", null, null]
- },
- {
- "expression": "map(&a, badkey)",
- "error": "invalid-type"
- },
- {
- "expression": "map(&foo, empty)",
- "result": []
- }
- ]
-}, {
- "given": {
- "array": [
- {
- "foo": {"bar": "yes1"}
- },
- {
- "foo": {"bar": "yes2"}
- },
- {
- "foo1": {"bar": "no"}
- }
- ]},
- "cases": [
- {
- "expression": "map(&foo.bar, array)",
- "result": ["yes1", "yes2", null]
- },
- {
- "expression": "map(&foo1.bar, array)",
- "result": [null, null, "no"]
- },
- {
- "expression": "map(&foo.bar.baz, array)",
- "result": [null, null, null]
- }
- ]
-}, {
- "given": {
- "array": [[1, 2, 3, [4]], [5, 6, 7, [8, 9]]]
- },
- "cases": [
- {
- "expression": "map(&[], array)",
- "result": [[1, 2, 3, 4], [5, 6, 7, 8, 9]]
- }
- ]
-}
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/identifiers.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/identifiers.json
deleted file mode 100644
index 7998a41a..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/identifiers.json
+++ /dev/null
@@ -1,1377 +0,0 @@
-[
- {
- "given": {
- "__L": true
- },
- "cases": [
- {
- "expression": "__L",
- "result": true
- }
- ]
- },
- {
- "given": {
- "!\r": true
- },
- "cases": [
- {
- "expression": "\"!\\r\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Y_1623": true
- },
- "cases": [
- {
- "expression": "Y_1623",
- "result": true
- }
- ]
- },
- {
- "given": {
- "x": true
- },
- "cases": [
- {
- "expression": "x",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\tF\uCebb": true
- },
- "cases": [
- {
- "expression": "\"\\tF\\uCebb\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- " \t": true
- },
- "cases": [
- {
- "expression": "\" \\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- " ": true
- },
- "cases": [
- {
- "expression": "\" \"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "v2": true
- },
- "cases": [
- {
- "expression": "v2",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\t": true
- },
- "cases": [
- {
- "expression": "\"\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_X": true
- },
- "cases": [
- {
- "expression": "_X",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\t4\ud9da\udd15": true
- },
- "cases": [
- {
- "expression": "\"\\t4\\ud9da\\udd15\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "v24_W": true
- },
- "cases": [
- {
- "expression": "v24_W",
- "result": true
- }
- ]
- },
- {
- "given": {
- "H": true
- },
- "cases": [
- {
- "expression": "\"H\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\f": true
- },
- "cases": [
- {
- "expression": "\"\\f\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "E4": true
- },
- "cases": [
- {
- "expression": "\"E4\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "!": true
- },
- "cases": [
- {
- "expression": "\"!\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "tM": true
- },
- "cases": [
- {
- "expression": "tM",
- "result": true
- }
- ]
- },
- {
- "given": {
- " [": true
- },
- "cases": [
- {
- "expression": "\" [\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "R!": true
- },
- "cases": [
- {
- "expression": "\"R!\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_6W": true
- },
- "cases": [
- {
- "expression": "_6W",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\uaBA1\r": true
- },
- "cases": [
- {
- "expression": "\"\\uaBA1\\r\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "tL7": true
- },
- "cases": [
- {
- "expression": "tL7",
- "result": true
- }
- ]
- },
- {
- "given": {
- "<<U\t": true
- },
- "cases": [
- {
- "expression": "\"<<U\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\ubBcE\ufAfB": true
- },
- "cases": [
- {
- "expression": "\"\\ubBcE\\ufAfB\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "sNA_": true
- },
- "cases": [
- {
- "expression": "sNA_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "9": true
- },
- "cases": [
- {
- "expression": "\"9\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\\\b\ud8cb\udc83": true
- },
- "cases": [
- {
- "expression": "\"\\\\\\b\\ud8cb\\udc83\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "r": true
- },
- "cases": [
- {
- "expression": "\"r\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Q": true
- },
- "cases": [
- {
- "expression": "Q",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_Q__7GL8": true
- },
- "cases": [
- {
- "expression": "_Q__7GL8",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\\": true
- },
- "cases": [
- {
- "expression": "\"\\\\\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "RR9_": true
- },
- "cases": [
- {
- "expression": "RR9_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\r\f:": true
- },
- "cases": [
- {
- "expression": "\"\\r\\f:\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "r7": true
- },
- "cases": [
- {
- "expression": "r7",
- "result": true
- }
- ]
- },
- {
- "given": {
- "-": true
- },
- "cases": [
- {
- "expression": "\"-\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "p9": true
- },
- "cases": [
- {
- "expression": "p9",
- "result": true
- }
- ]
- },
- {
- "given": {
- "__": true
- },
- "cases": [
- {
- "expression": "__",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\b\t": true
- },
- "cases": [
- {
- "expression": "\"\\b\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "O_": true
- },
- "cases": [
- {
- "expression": "O_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_r_8": true
- },
- "cases": [
- {
- "expression": "_r_8",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_j": true
- },
- "cases": [
- {
- "expression": "_j",
- "result": true
- }
- ]
- },
- {
- "given": {
- ":": true
- },
- "cases": [
- {
- "expression": "\":\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\rB": true
- },
- "cases": [
- {
- "expression": "\"\\rB\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Obf": true
- },
- "cases": [
- {
- "expression": "Obf",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\n": true
- },
- "cases": [
- {
- "expression": "\"\\n\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\f\udb54\udf33": true
- },
- "cases": [
- {
- "expression": "\"\\f\udb54\udf33\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\\\u4FDc": true
- },
- "cases": [
- {
- "expression": "\"\\\\\\u4FDc\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\r": true
- },
- "cases": [
- {
- "expression": "\"\\r\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "m_": true
- },
- "cases": [
- {
- "expression": "m_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\r\fB ": true
- },
- "cases": [
- {
- "expression": "\"\\r\\fB \"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "+\"\"": true
- },
- "cases": [
- {
- "expression": "\"+\\\"\\\"\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Mg": true
- },
- "cases": [
- {
- "expression": "Mg",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\"!\/": true
- },
- "cases": [
- {
- "expression": "\"\\\"!\\/\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "7\"": true
- },
- "cases": [
- {
- "expression": "\"7\\\"\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\\\udb3a\udca4S": true
- },
- "cases": [
- {
- "expression": "\"\\\\\udb3a\udca4S\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\"": true
- },
- "cases": [
- {
- "expression": "\"\\\"\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Kl": true
- },
- "cases": [
- {
- "expression": "Kl",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\b\b": true
- },
- "cases": [
- {
- "expression": "\"\\b\\b\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- ">": true
- },
- "cases": [
- {
- "expression": "\">\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "hvu": true
- },
- "cases": [
- {
- "expression": "hvu",
- "result": true
- }
- ]
- },
- {
- "given": {
- "; !": true
- },
- "cases": [
- {
- "expression": "\"; !\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "hU": true
- },
- "cases": [
- {
- "expression": "hU",
- "result": true
- }
- ]
- },
- {
- "given": {
- "!I\n\/": true
- },
- "cases": [
- {
- "expression": "\"!I\\n\\/\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\uEEbF": true
- },
- "cases": [
- {
- "expression": "\"\\uEEbF\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "U)\t": true
- },
- "cases": [
- {
- "expression": "\"U)\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "fa0_9": true
- },
- "cases": [
- {
- "expression": "fa0_9",
- "result": true
- }
- ]
- },
- {
- "given": {
- "/": true
- },
- "cases": [
- {
- "expression": "\"/\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Gy": true
- },
- "cases": [
- {
- "expression": "Gy",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\b": true
- },
- "cases": [
- {
- "expression": "\"\\b\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "<": true
- },
- "cases": [
- {
- "expression": "\"<\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\t": true
- },
- "cases": [
- {
- "expression": "\"\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\t&\\\r": true
- },
- "cases": [
- {
- "expression": "\"\\t&\\\\\\r\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "#": true
- },
- "cases": [
- {
- "expression": "\"#\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "B__": true
- },
- "cases": [
- {
- "expression": "B__",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\nS \n": true
- },
- "cases": [
- {
- "expression": "\"\\nS \\n\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Bp": true
- },
- "cases": [
- {
- "expression": "Bp",
- "result": true
- }
- ]
- },
- {
- "given": {
- ",\t;": true
- },
- "cases": [
- {
- "expression": "\",\\t;\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "B_q": true
- },
- "cases": [
- {
- "expression": "B_q",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\/+\t\n\b!Z": true
- },
- "cases": [
- {
- "expression": "\"\\/+\\t\\n\\b!Z\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\udadd\udfc7\\ueFAc": true
- },
- "cases": [
- {
- "expression": "\"\udadd\udfc7\\\\ueFAc\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- ":\f": true
- },
- "cases": [
- {
- "expression": "\":\\f\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\/": true
- },
- "cases": [
- {
- "expression": "\"\\/\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_BW_6Hg_Gl": true
- },
- "cases": [
- {
- "expression": "_BW_6Hg_Gl",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\udbcf\udc02": true
- },
- "cases": [
- {
- "expression": "\"\udbcf\udc02\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "zs1DC": true
- },
- "cases": [
- {
- "expression": "zs1DC",
- "result": true
- }
- ]
- },
- {
- "given": {
- "__434": true
- },
- "cases": [
- {
- "expression": "__434",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\udb94\udd41": true
- },
- "cases": [
- {
- "expression": "\"\udb94\udd41\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Z_5": true
- },
- "cases": [
- {
- "expression": "Z_5",
- "result": true
- }
- ]
- },
- {
- "given": {
- "z_M_": true
- },
- "cases": [
- {
- "expression": "z_M_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "YU_2": true
- },
- "cases": [
- {
- "expression": "YU_2",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_0": true
- },
- "cases": [
- {
- "expression": "_0",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\b+": true
- },
- "cases": [
- {
- "expression": "\"\\b+\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\"": true
- },
- "cases": [
- {
- "expression": "\"\\\"\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "D7": true
- },
- "cases": [
- {
- "expression": "D7",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_62L": true
- },
- "cases": [
- {
- "expression": "_62L",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\tK\t": true
- },
- "cases": [
- {
- "expression": "\"\\tK\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\n\\\f": true
- },
- "cases": [
- {
- "expression": "\"\\n\\\\\\f\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "I_": true
- },
- "cases": [
- {
- "expression": "I_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "W_a0_": true
- },
- "cases": [
- {
- "expression": "W_a0_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "BQ": true
- },
- "cases": [
- {
- "expression": "BQ",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\tX$\uABBb": true
- },
- "cases": [
- {
- "expression": "\"\\tX$\\uABBb\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Z9": true
- },
- "cases": [
- {
- "expression": "Z9",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\b%\"\uda38\udd0f": true
- },
- "cases": [
- {
- "expression": "\"\\b%\\\"\uda38\udd0f\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_F": true
- },
- "cases": [
- {
- "expression": "_F",
- "result": true
- }
- ]
- },
- {
- "given": {
- "!,": true
- },
- "cases": [
- {
- "expression": "\"!,\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\"!": true
- },
- "cases": [
- {
- "expression": "\"\\\"!\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "Hh": true
- },
- "cases": [
- {
- "expression": "Hh",
- "result": true
- }
- ]
- },
- {
- "given": {
- "&": true
- },
- "cases": [
- {
- "expression": "\"&\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "9\r\\R": true
- },
- "cases": [
- {
- "expression": "\"9\\r\\\\R\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "M_k": true
- },
- "cases": [
- {
- "expression": "M_k",
- "result": true
- }
- ]
- },
- {
- "given": {
- "!\b\n\udb06\ude52\"\"": true
- },
- "cases": [
- {
- "expression": "\"!\\b\\n\udb06\ude52\\\"\\\"\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "6": true
- },
- "cases": [
- {
- "expression": "\"6\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_7": true
- },
- "cases": [
- {
- "expression": "_7",
- "result": true
- }
- ]
- },
- {
- "given": {
- "0": true
- },
- "cases": [
- {
- "expression": "\"0\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\\8\\": true
- },
- "cases": [
- {
- "expression": "\"\\\\8\\\\\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "b7eo": true
- },
- "cases": [
- {
- "expression": "b7eo",
- "result": true
- }
- ]
- },
- {
- "given": {
- "xIUo9": true
- },
- "cases": [
- {
- "expression": "xIUo9",
- "result": true
- }
- ]
- },
- {
- "given": {
- "5": true
- },
- "cases": [
- {
- "expression": "\"5\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "?": true
- },
- "cases": [
- {
- "expression": "\"?\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "sU": true
- },
- "cases": [
- {
- "expression": "sU",
- "result": true
- }
- ]
- },
- {
- "given": {
- "VH2&H\\\/": true
- },
- "cases": [
- {
- "expression": "\"VH2&H\\\\\\/\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_C": true
- },
- "cases": [
- {
- "expression": "_C",
- "result": true
- }
- ]
- },
- {
- "given": {
- "_": true
- },
- "cases": [
- {
- "expression": "_",
- "result": true
- }
- ]
- },
- {
- "given": {
- "<\t": true
- },
- "cases": [
- {
- "expression": "\"<\\t\"",
- "result": true
- }
- ]
- },
- {
- "given": {
- "\uD834\uDD1E": true
- },
- "cases": [
- {
- "expression": "\"\\uD834\\uDD1E\"",
- "result": true
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/indices.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/indices.json
deleted file mode 100644
index aa03b35d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/indices.json
+++ /dev/null
@@ -1,346 +0,0 @@
-[{
- "given":
- {"foo": {"bar": ["zero", "one", "two"]}},
- "cases": [
- {
- "expression": "foo.bar[0]",
- "result": "zero"
- },
- {
- "expression": "foo.bar[1]",
- "result": "one"
- },
- {
- "expression": "foo.bar[2]",
- "result": "two"
- },
- {
- "expression": "foo.bar[3]",
- "result": null
- },
- {
- "expression": "foo.bar[-1]",
- "result": "two"
- },
- {
- "expression": "foo.bar[-2]",
- "result": "one"
- },
- {
- "expression": "foo.bar[-3]",
- "result": "zero"
- },
- {
- "expression": "foo.bar[-4]",
- "result": null
- }
- ]
-},
-{
- "given":
- {"foo": [{"bar": "one"}, {"bar": "two"}, {"bar": "three"}, {"notbar": "four"}]},
- "cases": [
- {
- "expression": "foo.bar",
- "result": null
- },
- {
- "expression": "foo[0].bar",
- "result": "one"
- },
- {
- "expression": "foo[1].bar",
- "result": "two"
- },
- {
- "expression": "foo[2].bar",
- "result": "three"
- },
- {
- "expression": "foo[3].notbar",
- "result": "four"
- },
- {
- "expression": "foo[3].bar",
- "result": null
- },
- {
- "expression": "foo[0]",
- "result": {"bar": "one"}
- },
- {
- "expression": "foo[1]",
- "result": {"bar": "two"}
- },
- {
- "expression": "foo[2]",
- "result": {"bar": "three"}
- },
- {
- "expression": "foo[3]",
- "result": {"notbar": "four"}
- },
- {
- "expression": "foo[4]",
- "result": null
- }
- ]
-},
-{
- "given": [
- "one", "two", "three"
- ],
- "cases": [
- {
- "expression": "[0]",
- "result": "one"
- },
- {
- "expression": "[1]",
- "result": "two"
- },
- {
- "expression": "[2]",
- "result": "three"
- },
- {
- "expression": "[-1]",
- "result": "three"
- },
- {
- "expression": "[-2]",
- "result": "two"
- },
- {
- "expression": "[-3]",
- "result": "one"
- }
- ]
-},
-{
- "given": {"reservations": [
- {"instances": [{"foo": 1}, {"foo": 2}]}
- ]},
- "cases": [
- {
- "expression": "reservations[].instances[].foo",
- "result": [1, 2]
- },
- {
- "expression": "reservations[].instances[].bar",
- "result": []
- },
- {
- "expression": "reservations[].notinstances[].foo",
- "result": []
- },
- {
- "expression": "reservations[].notinstances[].foo",
- "result": []
- }
- ]
-},
-{
- "given": {"reservations": [{
- "instances": [
- {"foo": [{"bar": 1}, {"bar": 2}, {"notbar": 3}, {"bar": 4}]},
- {"foo": [{"bar": 5}, {"bar": 6}, {"notbar": [7]}, {"bar": 8}]},
- {"foo": "bar"},
- {"notfoo": [{"bar": 20}, {"bar": 21}, {"notbar": [7]}, {"bar": 22}]},
- {"bar": [{"baz": [1]}, {"baz": [2]}, {"baz": [3]}, {"baz": [4]}]},
- {"baz": [{"baz": [1, 2]}, {"baz": []}, {"baz": []}, {"baz": [3, 4]}]},
- {"qux": [{"baz": []}, {"baz": [1, 2, 3]}, {"baz": [4]}, {"baz": []}]}
- ],
- "otherkey": {"foo": [{"bar": 1}, {"bar": 2}, {"notbar": 3}, {"bar": 4}]}
- }, {
- "instances": [
- {"a": [{"bar": 1}, {"bar": 2}, {"notbar": 3}, {"bar": 4}]},
- {"b": [{"bar": 5}, {"bar": 6}, {"notbar": [7]}, {"bar": 8}]},
- {"c": "bar"},
- {"notfoo": [{"bar": 23}, {"bar": 24}, {"notbar": [7]}, {"bar": 25}]},
- {"qux": [{"baz": []}, {"baz": [1, 2, 3]}, {"baz": [4]}, {"baz": []}]}
- ],
- "otherkey": {"foo": [{"bar": 1}, {"bar": 2}, {"notbar": 3}, {"bar": 4}]}
- }
- ]},
- "cases": [
- {
- "expression": "reservations[].instances[].foo[].bar",
- "result": [1, 2, 4, 5, 6, 8]
- },
- {
- "expression": "reservations[].instances[].foo[].baz",
- "result": []
- },
- {
- "expression": "reservations[].instances[].notfoo[].bar",
- "result": [20, 21, 22, 23, 24, 25]
- },
- {
- "expression": "reservations[].instances[].notfoo[].notbar",
- "result": [[7], [7]]
- },
- {
- "expression": "reservations[].notinstances[].foo",
- "result": []
- },
- {
- "expression": "reservations[].instances[].foo[].notbar",
- "result": [3, [7]]
- },
- {
- "expression": "reservations[].instances[].bar[].baz",
- "result": [[1], [2], [3], [4]]
- },
- {
- "expression": "reservations[].instances[].baz[].baz",
- "result": [[1, 2], [], [], [3, 4]]
- },
- {
- "expression": "reservations[].instances[].qux[].baz",
- "result": [[], [1, 2, 3], [4], [], [], [1, 2, 3], [4], []]
- },
- {
- "expression": "reservations[].instances[].qux[].baz[]",
- "result": [1, 2, 3, 4, 1, 2, 3, 4]
- }
- ]
-},
-{
- "given": {
- "foo": [
- [["one", "two"], ["three", "four"]],
- [["five", "six"], ["seven", "eight"]],
- [["nine"], ["ten"]]
- ]
- },
- "cases": [
- {
- "expression": "foo[]",
- "result": [["one", "two"], ["three", "four"], ["five", "six"],
- ["seven", "eight"], ["nine"], ["ten"]]
- },
- {
- "expression": "foo[][0]",
- "result": ["one", "three", "five", "seven", "nine", "ten"]
- },
- {
- "expression": "foo[][1]",
- "result": ["two", "four", "six", "eight"]
- },
- {
- "expression": "foo[][0][0]",
- "result": []
- },
- {
- "expression": "foo[][2][2]",
- "result": []
- },
- {
- "expression": "foo[][0][0][100]",
- "result": []
- }
- ]
-},
-{
- "given": {
- "foo": [{
- "bar": [
- {
- "qux": 2,
- "baz": 1
- },
- {
- "qux": 4,
- "baz": 3
- }
- ]
- },
- {
- "bar": [
- {
- "qux": 6,
- "baz": 5
- },
- {
- "qux": 8,
- "baz": 7
- }
- ]
- }
- ]
- },
- "cases": [
- {
- "expression": "foo",
- "result": [{"bar": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}]},
- {"bar": [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]}]
- },
- {
- "expression": "foo[]",
- "result": [{"bar": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}]},
- {"bar": [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]}]
- },
- {
- "expression": "foo[].bar",
- "result": [[{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}],
- [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]]
- },
- {
- "expression": "foo[].bar[]",
- "result": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3},
- {"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]
- },
- {
- "expression": "foo[].bar[].baz",
- "result": [1, 3, 5, 7]
- }
- ]
-},
-{
- "given": {
- "string": "string",
- "hash": {"foo": "bar", "bar": "baz"},
- "number": 23,
- "nullvalue": null
- },
- "cases": [
- {
- "expression": "string[]",
- "result": null
- },
- {
- "expression": "hash[]",
- "result": null
- },
- {
- "expression": "number[]",
- "result": null
- },
- {
- "expression": "nullvalue[]",
- "result": null
- },
- {
- "expression": "string[].foo",
- "result": null
- },
- {
- "expression": "hash[].foo",
- "result": null
- },
- {
- "expression": "number[].foo",
- "result": null
- },
- {
- "expression": "nullvalue[].foo",
- "result": null
- },
- {
- "expression": "nullvalue[].foo[].bar",
- "result": null
- }
- ]
-}
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/literal.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/literal.json
deleted file mode 100644
index c6706b97..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/literal.json
+++ /dev/null
@@ -1,185 +0,0 @@
-[
- {
- "given": {
- "foo": [{"name": "a"}, {"name": "b"}],
- "bar": {"baz": "qux"}
- },
- "cases": [
- {
- "expression": "`\"foo\"`",
- "result": "foo"
- },
- {
- "comment": "Interpret escaped unicode.",
- "expression": "`\"\\u03a6\"`",
- "result": "Φ"
- },
- {
- "expression": "`\"✓\"`",
- "result": "✓"
- },
- {
- "expression": "`[1, 2, 3]`",
- "result": [1, 2, 3]
- },
- {
- "expression": "`{\"a\": \"b\"}`",
- "result": {"a": "b"}
- },
- {
- "expression": "`true`",
- "result": true
- },
- {
- "expression": "`false`",
- "result": false
- },
- {
- "expression": "`null`",
- "result": null
- },
- {
- "expression": "`0`",
- "result": 0
- },
- {
- "expression": "`1`",
- "result": 1
- },
- {
- "expression": "`2`",
- "result": 2
- },
- {
- "expression": "`3`",
- "result": 3
- },
- {
- "expression": "`4`",
- "result": 4
- },
- {
- "expression": "`5`",
- "result": 5
- },
- {
- "expression": "`6`",
- "result": 6
- },
- {
- "expression": "`7`",
- "result": 7
- },
- {
- "expression": "`8`",
- "result": 8
- },
- {
- "expression": "`9`",
- "result": 9
- },
- {
- "comment": "Escaping a backtick in quotes",
- "expression": "`\"foo\\`bar\"`",
- "result": "foo`bar"
- },
- {
- "comment": "Double quote in literal",
- "expression": "`\"foo\\\"bar\"`",
- "result": "foo\"bar"
- },
- {
- "expression": "`\"1\\`\"`",
- "result": "1`"
- },
- {
- "comment": "Multiple literal expressions with escapes",
- "expression": "`\"\\\\\"`.{a:`\"b\"`}",
- "result": {"a": "b"}
- },
- {
- "comment": "literal . identifier",
- "expression": "`{\"a\": \"b\"}`.a",
- "result": "b"
- },
- {
- "comment": "literal . identifier . identifier",
- "expression": "`{\"a\": {\"b\": \"c\"}}`.a.b",
- "result": "c"
- },
- {
- "comment": "literal . identifier bracket-expr",
- "expression": "`[0, 1, 2]`[1]",
- "result": 1
- }
- ]
- },
- {
- "comment": "Literals",
- "given": {"type": "object"},
- "cases": [
- {
- "comment": "Literal with leading whitespace",
- "expression": "` {\"foo\": true}`",
- "result": {"foo": true}
- },
- {
- "comment": "Literal with trailing whitespace",
- "expression": "`{\"foo\": true} `",
- "result": {"foo": true}
- },
- {
- "comment": "Literal on RHS of subexpr not allowed",
- "expression": "foo.`\"bar\"`",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Raw String Literals",
- "given": {},
- "cases": [
- {
- "expression": "'foo'",
- "result": "foo"
- },
- {
- "expression": "' foo '",
- "result": " foo "
- },
- {
- "expression": "'0'",
- "result": "0"
- },
- {
- "expression": "'newline\n'",
- "result": "newline\n"
- },
- {
- "expression": "'\n'",
- "result": "\n"
- },
- {
- "expression": "'✓'",
- "result": "✓"
- },
- {
- "expression": "'𝄞'",
- "result": "𝄞"
- },
- {
- "expression": "' [foo] '",
- "result": " [foo] "
- },
- {
- "expression": "'[foo]'",
- "result": "[foo]"
- },
- {
- "comment": "Do not interpret escaped unicode.",
- "expression": "'\\u03a6'",
- "result": "\\u03a6"
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/multiselect.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/multiselect.json
deleted file mode 100644
index 8f2a481e..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/multiselect.json
+++ /dev/null
@@ -1,393 +0,0 @@
-[{
- "given": {
- "foo": {
- "bar": "bar",
- "baz": "baz",
- "qux": "qux",
- "nested": {
- "one": {
- "a": "first",
- "b": "second",
- "c": "third"
- },
- "two": {
- "a": "first",
- "b": "second",
- "c": "third"
- },
- "three": {
- "a": "first",
- "b": "second",
- "c": {"inner": "third"}
- }
- }
- },
- "bar": 1,
- "baz": 2,
- "qux\"": 3
- },
- "cases": [
- {
- "expression": "foo.{bar: bar}",
- "result": {"bar": "bar"}
- },
- {
- "expression": "foo.{\"bar\": bar}",
- "result": {"bar": "bar"}
- },
- {
- "expression": "foo.{\"foo.bar\": bar}",
- "result": {"foo.bar": "bar"}
- },
- {
- "expression": "foo.{bar: bar, baz: baz}",
- "result": {"bar": "bar", "baz": "baz"}
- },
- {
- "expression": "foo.{\"bar\": bar, \"baz\": baz}",
- "result": {"bar": "bar", "baz": "baz"}
- },
- {
- "expression": "{\"baz\": baz, \"qux\\\"\": \"qux\\\"\"}",
- "result": {"baz": 2, "qux\"": 3}
- },
- {
- "expression": "foo.{bar:bar,baz:baz}",
- "result": {"bar": "bar", "baz": "baz"}
- },
- {
- "expression": "foo.{bar: bar,qux: qux}",
- "result": {"bar": "bar", "qux": "qux"}
- },
- {
- "expression": "foo.{bar: bar, noexist: noexist}",
- "result": {"bar": "bar", "noexist": null}
- },
- {
- "expression": "foo.{noexist: noexist, alsonoexist: alsonoexist}",
- "result": {"noexist": null, "alsonoexist": null}
- },
- {
- "expression": "foo.badkey.{nokey: nokey, alsonokey: alsonokey}",
- "result": null
- },
- {
- "expression": "foo.nested.*.{a: a,b: b}",
- "result": [{"a": "first", "b": "second"},
- {"a": "first", "b": "second"},
- {"a": "first", "b": "second"}]
- },
- {
- "expression": "foo.nested.three.{a: a, cinner: c.inner}",
- "result": {"a": "first", "cinner": "third"}
- },
- {
- "expression": "foo.nested.three.{a: a, c: c.inner.bad.key}",
- "result": {"a": "first", "c": null}
- },
- {
- "expression": "foo.{a: nested.one.a, b: nested.two.b}",
- "result": {"a": "first", "b": "second"}
- },
- {
- "expression": "{bar: bar, baz: baz}",
- "result": {"bar": 1, "baz": 2}
- },
- {
- "expression": "{bar: bar}",
- "result": {"bar": 1}
- },
- {
- "expression": "{otherkey: bar}",
- "result": {"otherkey": 1}
- },
- {
- "expression": "{no: no, exist: exist}",
- "result": {"no": null, "exist": null}
- },
- {
- "expression": "foo.[bar]",
- "result": ["bar"]
- },
- {
- "expression": "foo.[bar,baz]",
- "result": ["bar", "baz"]
- },
- {
- "expression": "foo.[bar,qux]",
- "result": ["bar", "qux"]
- },
- {
- "expression": "foo.[bar,noexist]",
- "result": ["bar", null]
- },
- {
- "expression": "foo.[noexist,alsonoexist]",
- "result": [null, null]
- }
- ]
-}, {
- "given": {
- "foo": {"bar": 1, "baz": [2, 3, 4]}
- },
- "cases": [
- {
- "expression": "foo.{bar:bar,baz:baz}",
- "result": {"bar": 1, "baz": [2, 3, 4]}
- },
- {
- "expression": "foo.[bar,baz[0]]",
- "result": [1, 2]
- },
- {
- "expression": "foo.[bar,baz[1]]",
- "result": [1, 3]
- },
- {
- "expression": "foo.[bar,baz[2]]",
- "result": [1, 4]
- },
- {
- "expression": "foo.[bar,baz[3]]",
- "result": [1, null]
- },
- {
- "expression": "foo.[bar[0],baz[3]]",
- "result": [null, null]
- }
- ]
-}, {
- "given": {
- "foo": {"bar": 1, "baz": 2}
- },
- "cases": [
- {
- "expression": "foo.{bar: bar, baz: baz}",
- "result": {"bar": 1, "baz": 2}
- },
- {
- "expression": "foo.[bar,baz]",
- "result": [1, 2]
- }
- ]
-}, {
- "given": {
- "foo": {
- "bar": {"baz": [{"common": "first", "one": 1},
- {"common": "second", "two": 2}]},
- "ignoreme": 1,
- "includeme": true
- }
- },
- "cases": [
- {
- "expression": "foo.{bar: bar.baz[1],includeme: includeme}",
- "result": {"bar": {"common": "second", "two": 2}, "includeme": true}
- },
- {
- "expression": "foo.{\"bar.baz.two\": bar.baz[1].two, includeme: includeme}",
- "result": {"bar.baz.two": 2, "includeme": true}
- },
- {
- "expression": "foo.[includeme, bar.baz[*].common]",
- "result": [true, ["first", "second"]]
- },
- {
- "expression": "foo.[includeme, bar.baz[*].none]",
- "result": [true, []]
- },
- {
- "expression": "foo.[includeme, bar.baz[].common]",
- "result": [true, ["first", "second"]]
- }
- ]
-}, {
- "given": {
- "reservations": [{
- "instances": [
- {"id": "id1",
- "name": "first"},
- {"id": "id2",
- "name": "second"}
- ]}, {
- "instances": [
- {"id": "id3",
- "name": "third"},
- {"id": "id4",
- "name": "fourth"}
- ]}
- ]},
- "cases": [
- {
- "expression": "reservations[*].instances[*].{id: id, name: name}",
- "result": [[{"id": "id1", "name": "first"}, {"id": "id2", "name": "second"}],
- [{"id": "id3", "name": "third"}, {"id": "id4", "name": "fourth"}]]
- },
- {
- "expression": "reservations[].instances[].{id: id, name: name}",
- "result": [{"id": "id1", "name": "first"},
- {"id": "id2", "name": "second"},
- {"id": "id3", "name": "third"},
- {"id": "id4", "name": "fourth"}]
- },
- {
- "expression": "reservations[].instances[].[id, name]",
- "result": [["id1", "first"],
- ["id2", "second"],
- ["id3", "third"],
- ["id4", "fourth"]]
- }
- ]
-},
-{
- "given": {
- "foo": [{
- "bar": [
- {
- "qux": 2,
- "baz": 1
- },
- {
- "qux": 4,
- "baz": 3
- }
- ]
- },
- {
- "bar": [
- {
- "qux": 6,
- "baz": 5
- },
- {
- "qux": 8,
- "baz": 7
- }
- ]
- }
- ]
- },
- "cases": [
- {
- "expression": "foo",
- "result": [{"bar": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}]},
- {"bar": [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]}]
- },
- {
- "expression": "foo[]",
- "result": [{"bar": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}]},
- {"bar": [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]}]
- },
- {
- "expression": "foo[].bar",
- "result": [[{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3}],
- [{"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]]
- },
- {
- "expression": "foo[].bar[]",
- "result": [{"qux": 2, "baz": 1}, {"qux": 4, "baz": 3},
- {"qux": 6, "baz": 5}, {"qux": 8, "baz": 7}]
- },
- {
- "expression": "foo[].bar[].[baz, qux]",
- "result": [[1, 2], [3, 4], [5, 6], [7, 8]]
- },
- {
- "expression": "foo[].bar[].[baz]",
- "result": [[1], [3], [5], [7]]
- },
- {
- "expression": "foo[].bar[].[baz, qux][]",
- "result": [1, 2, 3, 4, 5, 6, 7, 8]
- }
- ]
-},
-{
- "given": {
- "foo": {
- "baz": [
- {
- "bar": "abc"
- }, {
- "bar": "def"
- }
- ],
- "qux": ["zero"]
- }
- },
- "cases": [
- {
- "expression": "foo.[baz[*].bar, qux[0]]",
- "result": [["abc", "def"], "zero"]
- }
- ]
-},
-{
- "given": {
- "foo": {
- "baz": [
- {
- "bar": "a",
- "bam": "b",
- "boo": "c"
- }, {
- "bar": "d",
- "bam": "e",
- "boo": "f"
- }
- ],
- "qux": ["zero"]
- }
- },
- "cases": [
- {
- "expression": "foo.[baz[*].[bar, boo], qux[0]]",
- "result": [[["a", "c" ], ["d", "f" ]], "zero"]
- }
- ]
-},
-{
- "given": {
- "foo": {
- "baz": [
- {
- "bar": "a",
- "bam": "b",
- "boo": "c"
- }, {
- "bar": "d",
- "bam": "e",
- "boo": "f"
- }
- ],
- "qux": ["zero"]
- }
- },
- "cases": [
- {
- "expression": "foo.[baz[*].not_there || baz[*].bar, qux[0]]",
- "result": [["a", "d"], "zero"]
- }
- ]
-},
-{
- "given": {"type": "object"},
- "cases": [
- {
- "comment": "Nested multiselect",
- "expression": "[[*],*]",
- "result": [null, ["object"]]
- }
- ]
-},
-{
- "given": [],
- "cases": [
- {
- "comment": "Nested multiselect",
- "expression": "[[*]]",
- "result": [[]]
- }
- ]
-}
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/ormatch.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/ormatch.json
deleted file mode 100644
index 2127cf44..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/ormatch.json
+++ /dev/null
@@ -1,59 +0,0 @@
-[{
- "given":
- {"outer": {"foo": "foo", "bar": "bar", "baz": "baz"}},
- "cases": [
- {
- "expression": "outer.foo || outer.bar",
- "result": "foo"
- },
- {
- "expression": "outer.foo||outer.bar",
- "result": "foo"
- },
- {
- "expression": "outer.bar || outer.baz",
- "result": "bar"
- },
- {
- "expression": "outer.bar||outer.baz",
- "result": "bar"
- },
- {
- "expression": "outer.bad || outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.bad||outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.foo || outer.bad",
- "result": "foo"
- },
- {
- "expression": "outer.foo||outer.bad",
- "result": "foo"
- },
- {
- "expression": "outer.bad || outer.alsobad",
- "result": null
- },
- {
- "expression": "outer.bad||outer.alsobad",
- "result": null
- }
- ]
-}, {
- "given":
- {"outer": {"foo": "foo", "bool": false, "empty_list": [], "empty_string": ""}},
- "cases": [
- {
- "expression": "outer.empty_string || outer.foo",
- "result": "foo"
- },
- {
- "expression": "outer.nokey || outer.bool || outer.empty_list || outer.empty_string || outer.foo",
- "result": "foo"
- }
- ]
-}]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/pipe.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/pipe.json
deleted file mode 100644
index b10c0a49..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/pipe.json
+++ /dev/null
@@ -1,131 +0,0 @@
-[{
- "given": {
- "foo": {
- "bar": {
- "baz": "subkey"
- },
- "other": {
- "baz": "subkey"
- },
- "other2": {
- "baz": "subkey"
- },
- "other3": {
- "notbaz": ["a", "b", "c"]
- },
- "other4": {
- "notbaz": ["a", "b", "c"]
- }
- }
- },
- "cases": [
- {
- "expression": "foo.*.baz | [0]",
- "result": "subkey"
- },
- {
- "expression": "foo.*.baz | [1]",
- "result": "subkey"
- },
- {
- "expression": "foo.*.baz | [2]",
- "result": "subkey"
- },
- {
- "expression": "foo.bar.* | [0]",
- "result": "subkey"
- },
- {
- "expression": "foo.*.notbaz | [*]",
- "result": [["a", "b", "c"], ["a", "b", "c"]]
- },
- {
- "expression": "{\"a\": foo.bar, \"b\": foo.other} | *.baz",
- "result": ["subkey", "subkey"]
- }
- ]
-}, {
- "given": {
- "foo": {
- "bar": {
- "baz": "one"
- },
- "other": {
- "baz": "two"
- },
- "other2": {
- "baz": "three"
- },
- "other3": {
- "notbaz": ["a", "b", "c"]
- },
- "other4": {
- "notbaz": ["d", "e", "f"]
- }
- }
- },
- "cases": [
- {
- "expression": "foo | bar",
- "result": {"baz": "one"}
- },
- {
- "expression": "foo | bar | baz",
- "result": "one"
- },
- {
- "expression": "foo|bar| baz",
- "result": "one"
- },
- {
- "expression": "not_there | [0]",
- "result": null
- },
- {
- "expression": "not_there | [0]",
- "result": null
- },
- {
- "expression": "[foo.bar, foo.other] | [0]",
- "result": {"baz": "one"}
- },
- {
- "expression": "{\"a\": foo.bar, \"b\": foo.other} | a",
- "result": {"baz": "one"}
- },
- {
- "expression": "{\"a\": foo.bar, \"b\": foo.other} | b",
- "result": {"baz": "two"}
- },
- {
- "expression": "foo.bam || foo.bar | baz",
- "result": "one"
- },
- {
- "expression": "foo | not_there || bar",
- "result": {"baz": "one"}
- }
- ]
-}, {
- "given": {
- "foo": [{
- "bar": [{
- "baz": "one"
- }, {
- "baz": "two"
- }]
- }, {
- "bar": [{
- "baz": "three"
- }, {
- "baz": "four"
- }]
- }]
- },
- "cases": [
- {
- "expression": "foo[*].bar[*] | [0][0]",
- "result": {"baz": "one"}
- }
- ]
-}]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/slice.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/slice.json
deleted file mode 100644
index 35947727..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/slice.json
+++ /dev/null
@@ -1,187 +0,0 @@
-[{
- "given": {
- "foo": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- "bar": {
- "baz": 1
- }
- },
- "cases": [
- {
- "expression": "bar[0:10]",
- "result": null
- },
- {
- "expression": "foo[0:10:1]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[0:10]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[0:10:]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[0::1]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[0::]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[0:]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[:10:1]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[::1]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[:10:]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[::]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[:]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[1:9]",
- "result": [1, 2, 3, 4, 5, 6, 7, 8]
- },
- {
- "expression": "foo[0:10:2]",
- "result": [0, 2, 4, 6, 8]
- },
- {
- "expression": "foo[5:]",
- "result": [5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[5::2]",
- "result": [5, 7, 9]
- },
- {
- "expression": "foo[::2]",
- "result": [0, 2, 4, 6, 8]
- },
- {
- "expression": "foo[::-1]",
- "result": [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
- },
- {
- "expression": "foo[1::2]",
- "result": [1, 3, 5, 7, 9]
- },
- {
- "expression": "foo[10:0:-1]",
- "result": [9, 8, 7, 6, 5, 4, 3, 2, 1]
- },
- {
- "expression": "foo[10:5:-1]",
- "result": [9, 8, 7, 6]
- },
- {
- "expression": "foo[8:2:-2]",
- "result": [8, 6, 4]
- },
- {
- "expression": "foo[0:20]",
- "result": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
- },
- {
- "expression": "foo[10:-20:-1]",
- "result": [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
- },
- {
- "expression": "foo[10:-20]",
- "result": []
- },
- {
- "expression": "foo[-4:-1]",
- "result": [6, 7, 8]
- },
- {
- "expression": "foo[:-5:-1]",
- "result": [9, 8, 7, 6]
- },
- {
- "expression": "foo[8:2:0]",
- "error": "invalid-value"
- },
- {
- "expression": "foo[8:2:0:1]",
- "error": "syntax"
- },
- {
- "expression": "foo[8:2&]",
- "error": "syntax"
- },
- {
- "expression": "foo[2:a:3]",
- "error": "syntax"
- }
- ]
-}, {
- "given": {
- "foo": [{"a": 1}, {"a": 2}, {"a": 3}],
- "bar": [{"a": {"b": 1}}, {"a": {"b": 2}},
- {"a": {"b": 3}}],
- "baz": 50
- },
- "cases": [
- {
- "expression": "foo[:2].a",
- "result": [1, 2]
- },
- {
- "expression": "foo[:2].b",
- "result": []
- },
- {
- "expression": "foo[:2].a.b",
- "result": []
- },
- {
- "expression": "bar[::-1].a.b",
- "result": [3, 2, 1]
- },
- {
- "expression": "bar[:2].a.b",
- "result": [1, 2]
- },
- {
- "expression": "baz[:2].a",
- "result": null
- }
- ]
-}, {
- "given": [{"a": 1}, {"a": 2}, {"a": 3}],
- "cases": [
- {
- "expression": "[:]",
- "result": [{"a": 1}, {"a": 2}, {"a": 3}]
- },
- {
- "expression": "[:2].a",
- "result": [1, 2]
- },
- {
- "expression": "[::-1].a",
- "result": [3, 2, 1]
- },
- {
- "expression": "[:2].b",
- "result": []
- }
- ]
-}]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/syntax.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/syntax.json
deleted file mode 100644
index 003c2945..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/syntax.json
+++ /dev/null
@@ -1,616 +0,0 @@
-[{
- "comment": "Dot syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "foo.bar",
- "result": null
- },
- {
- "expression": "foo.1",
- "error": "syntax"
- },
- {
- "expression": "foo.-11",
- "error": "syntax"
- },
- {
- "expression": "foo",
- "result": null
- },
- {
- "expression": "foo.",
- "error": "syntax"
- },
- {
- "expression": "foo.",
- "error": "syntax"
- },
- {
- "expression": ".foo",
- "error": "syntax"
- },
- {
- "expression": "foo..bar",
- "error": "syntax"
- },
- {
- "expression": "foo.bar.",
- "error": "syntax"
- },
- {
- "expression": "foo[.]",
- "error": "syntax"
- }
- ]
-},
- {
- "comment": "Simple token errors",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": ".",
- "error": "syntax"
- },
- {
- "expression": ":",
- "error": "syntax"
- },
- {
- "expression": ",",
- "error": "syntax"
- },
- {
- "expression": "]",
- "error": "syntax"
- },
- {
- "expression": "[",
- "error": "syntax"
- },
- {
- "expression": "}",
- "error": "syntax"
- },
- {
- "expression": "{",
- "error": "syntax"
- },
- {
- "expression": ")",
- "error": "syntax"
- },
- {
- "expression": "(",
- "error": "syntax"
- },
- {
- "expression": "((&",
- "error": "syntax"
- },
- {
- "expression": "a[",
- "error": "syntax"
- },
- {
- "expression": "a]",
- "error": "syntax"
- },
- {
- "expression": "a][",
- "error": "syntax"
- },
- {
- "expression": "!",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Boolean syntax errors",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "![!(!",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Wildcard syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "*",
- "result": ["object"]
- },
- {
- "expression": "*.*",
- "result": []
- },
- {
- "expression": "*.foo",
- "result": []
- },
- {
- "expression": "*[0]",
- "result": []
- },
- {
- "expression": ".*",
- "error": "syntax"
- },
- {
- "expression": "*foo",
- "error": "syntax"
- },
- {
- "expression": "*0",
- "error": "syntax"
- },
- {
- "expression": "foo[*]bar",
- "error": "syntax"
- },
- {
- "expression": "foo[*]*",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Flatten syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "[]",
- "result": null
- }
- ]
- },
- {
- "comment": "Simple bracket syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "[0]",
- "result": null
- },
- {
- "expression": "[*]",
- "result": null
- },
- {
- "expression": "*.[0]",
- "error": "syntax"
- },
- {
- "expression": "*.[\"0\"]",
- "result": [[null]]
- },
- {
- "expression": "[*].bar",
- "result": null
- },
- {
- "expression": "[*][0]",
- "result": null
- },
- {
- "expression": "foo[#]",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Multi-select list syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "foo[0]",
- "result": null
- },
- {
- "comment": "Valid multi-select of a list",
- "expression": "foo[0, 1]",
- "error": "syntax"
- },
- {
- "expression": "foo.[0]",
- "error": "syntax"
- },
- {
- "expression": "foo.[*]",
- "result": null
- },
- {
- "comment": "Multi-select of a list with trailing comma",
- "expression": "foo[0, ]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list with trailing comma and no close",
- "expression": "foo[0,",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list with trailing comma and no close",
- "expression": "foo.[a",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list with extra comma",
- "expression": "foo[0,, 1]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list using an identifier index",
- "expression": "foo[abc]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list using identifier indices",
- "expression": "foo[abc, def]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list using an identifier index",
- "expression": "foo[abc, 1]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a list using an identifier index with trailing comma",
- "expression": "foo[abc, ]",
- "error": "syntax"
- },
- {
- "comment": "Valid multi-select of a hash using an identifier index",
- "expression": "foo.[abc]",
- "result": null
- },
- {
- "comment": "Valid multi-select of a hash",
- "expression": "foo.[abc, def]",
- "result": null
- },
- {
- "comment": "Multi-select of a hash using a numeric index",
- "expression": "foo.[abc, 1]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a hash with a trailing comma",
- "expression": "foo.[abc, ]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a hash with extra commas",
- "expression": "foo.[abc,, def]",
- "error": "syntax"
- },
- {
- "comment": "Multi-select of a hash using number indices",
- "expression": "foo.[0, 1]",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Multi-select hash syntax",
- "given": {"type": "object"},
- "cases": [
- {
- "comment": "No key or value",
- "expression": "a{}",
- "error": "syntax"
- },
- {
- "comment": "No closing token",
- "expression": "a{",
- "error": "syntax"
- },
- {
- "comment": "Not a key value pair",
- "expression": "a{foo}",
- "error": "syntax"
- },
- {
- "comment": "Missing value and closing character",
- "expression": "a{foo:",
- "error": "syntax"
- },
- {
- "comment": "Missing closing character",
- "expression": "a{foo: 0",
- "error": "syntax"
- },
- {
- "comment": "Missing value",
- "expression": "a{foo:}",
- "error": "syntax"
- },
- {
- "comment": "Trailing comma and no closing character",
- "expression": "a{foo: 0, ",
- "error": "syntax"
- },
- {
- "comment": "Missing value with trailing comma",
- "expression": "a{foo: ,}",
- "error": "syntax"
- },
- {
- "comment": "Accessing Array using an identifier",
- "expression": "a{foo: bar}",
- "error": "syntax"
- },
- {
- "expression": "a{foo: 0}",
- "error": "syntax"
- },
- {
- "comment": "Missing key-value pair",
- "expression": "a.{}",
- "error": "syntax"
- },
- {
- "comment": "Not a key-value pair",
- "expression": "a.{foo}",
- "error": "syntax"
- },
- {
- "comment": "Missing value",
- "expression": "a.{foo:}",
- "error": "syntax"
- },
- {
- "comment": "Missing value with trailing comma",
- "expression": "a.{foo: ,}",
- "error": "syntax"
- },
- {
- "comment": "Valid multi-select hash extraction",
- "expression": "a.{foo: bar}",
- "result": null
- },
- {
- "comment": "Valid multi-select hash extraction",
- "expression": "a.{foo: bar, baz: bam}",
- "result": null
- },
- {
- "comment": "Trailing comma",
- "expression": "a.{foo: bar, }",
- "error": "syntax"
- },
- {
- "comment": "Missing key in second key-value pair",
- "expression": "a.{foo: bar, baz}",
- "error": "syntax"
- },
- {
- "comment": "Missing value in second key-value pair",
- "expression": "a.{foo: bar, baz:}",
- "error": "syntax"
- },
- {
- "comment": "Trailing comma",
- "expression": "a.{foo: bar, baz: bam, }",
- "error": "syntax"
- },
- {
- "comment": "Nested multi select",
- "expression": "{\"\\\\\":{\" \":*}}",
- "result": {"\\": {" ": ["object"]}}
- }
- ]
- },
- {
- "comment": "Or expressions",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "foo || bar",
- "result": null
- },
- {
- "expression": "foo ||",
- "error": "syntax"
- },
- {
- "expression": "foo.|| bar",
- "error": "syntax"
- },
- {
- "expression": " || foo",
- "error": "syntax"
- },
- {
- "expression": "foo || || foo",
- "error": "syntax"
- },
- {
- "expression": "foo.[a || b]",
- "result": null
- },
- {
- "expression": "foo.[a ||]",
- "error": "syntax"
- },
- {
- "expression": "\"foo",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Filter expressions",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "foo[?bar==`\"baz\"`]",
- "result": null
- },
- {
- "expression": "foo[? bar == `\"baz\"` ]",
- "result": null
- },
- {
- "expression": "foo[ ?bar==`\"baz\"`]",
- "error": "syntax"
- },
- {
- "expression": "foo[?bar==]",
- "error": "syntax"
- },
- {
- "expression": "foo[?==]",
- "error": "syntax"
- },
- {
- "expression": "foo[?==bar]",
- "error": "syntax"
- },
- {
- "expression": "foo[?bar==baz?]",
- "error": "syntax"
- },
- {
- "expression": "foo[?a.b.c==d.e.f]",
- "result": null
- },
- {
- "expression": "foo[?bar==`[0, 1, 2]`]",
- "result": null
- },
- {
- "expression": "foo[?bar==`[\"a\", \"b\", \"c\"]`]",
- "result": null
- },
- {
- "comment": "Literal char not escaped",
- "expression": "foo[?bar==`[\"foo`bar\"]`]",
- "error": "syntax"
- },
- {
- "comment": "Literal char escaped",
- "expression": "foo[?bar==`[\"foo\\`bar\"]`]",
- "result": null
- },
- {
- "comment": "Unknown comparator",
- "expression": "foo[?bar<>baz]",
- "error": "syntax"
- },
- {
- "comment": "Unknown comparator",
- "expression": "foo[?bar^baz]",
- "error": "syntax"
- },
- {
- "expression": "foo[bar==baz]",
- "error": "syntax"
- },
- {
- "comment": "Quoted identifier in filter expression no spaces",
- "expression": "[?\"\\\\\">`\"foo\"`]",
- "result": null
- },
- {
- "comment": "Quoted identifier in filter expression with spaces",
- "expression": "[?\"\\\\\" > `\"foo\"`]",
- "result": null
- }
- ]
- },
- {
- "comment": "Filter expression errors",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "bar.`\"anything\"`",
- "error": "syntax"
- },
- {
- "expression": "bar.baz.noexists.`\"literal\"`",
- "error": "syntax"
- },
- {
- "comment": "Literal wildcard projection",
- "expression": "foo[*].`\"literal\"`",
- "error": "syntax"
- },
- {
- "expression": "foo[*].name.`\"literal\"`",
- "error": "syntax"
- },
- {
- "expression": "foo[].name.`\"literal\"`",
- "error": "syntax"
- },
- {
- "expression": "foo[].name.`\"literal\"`.`\"subliteral\"`",
- "error": "syntax"
- },
- {
- "comment": "Projecting a literal onto an empty list",
- "expression": "foo[*].name.noexist.`\"literal\"`",
- "error": "syntax"
- },
- {
- "expression": "foo[].name.noexist.`\"literal\"`",
- "error": "syntax"
- },
- {
- "expression": "twolen[*].`\"foo\"`",
- "error": "syntax"
- },
- {
- "comment": "Two level projection of a literal",
- "expression": "twolen[*].threelen[*].`\"bar\"`",
- "error": "syntax"
- },
- {
- "comment": "Two level flattened projection of a literal",
- "expression": "twolen[].threelen[].`\"bar\"`",
- "error": "syntax"
- }
- ]
- },
- {
- "comment": "Identifiers",
- "given": {"type": "object"},
- "cases": [
- {
- "expression": "foo",
- "result": null
- },
- {
- "expression": "\"foo\"",
- "result": null
- },
- {
- "expression": "\"\\\\\"",
- "result": null
- }
- ]
- },
- {
- "comment": "Combined syntax",
- "given": [],
- "cases": [
- {
- "expression": "*||*|*|*",
- "result": null
- },
- {
- "expression": "*[]||[*]",
- "result": []
- },
- {
- "expression": "[*.*]",
- "result": [null]
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/unicode.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/unicode.json
deleted file mode 100644
index 6b07b0b6..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/unicode.json
+++ /dev/null
@@ -1,38 +0,0 @@
-[
- {
- "given": {"foo": [{"✓": "✓"}, {"✓": "✗"}]},
- "cases": [
- {
- "expression": "foo[].\"✓\"",
- "result": ["✓", "✗"]
- }
- ]
- },
- {
- "given": {"☯": true},
- "cases": [
- {
- "expression": "\"☯\"",
- "result": true
- }
- ]
- },
- {
- "given": {"♪♫•*¨*•.¸¸❤¸¸.•*¨*•♫♪": true},
- "cases": [
- {
- "expression": "\"♪♫•*¨*•.¸¸❤¸¸.•*¨*•♫♪\"",
- "result": true
- }
- ]
- },
- {
- "given": {"☃": true},
- "cases": [
- {
- "expression": "\"☃\"",
- "result": true
- }
- ]
- }
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/wildcard.json b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/wildcard.json
deleted file mode 100644
index 3bcec302..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance/wildcard.json
+++ /dev/null
@@ -1,460 +0,0 @@
-[{
- "given": {
- "foo": {
- "bar": {
- "baz": "val"
- },
- "other": {
- "baz": "val"
- },
- "other2": {
- "baz": "val"
- },
- "other3": {
- "notbaz": ["a", "b", "c"]
- },
- "other4": {
- "notbaz": ["a", "b", "c"]
- },
- "other5": {
- "other": {
- "a": 1,
- "b": 1,
- "c": 1
- }
- }
- }
- },
- "cases": [
- {
- "expression": "foo.*.baz",
- "result": ["val", "val", "val"]
- },
- {
- "expression": "foo.bar.*",
- "result": ["val"]
- },
- {
- "expression": "foo.*.notbaz",
- "result": [["a", "b", "c"], ["a", "b", "c"]]
- },
- {
- "expression": "foo.*.notbaz[0]",
- "result": ["a", "a"]
- },
- {
- "expression": "foo.*.notbaz[-1]",
- "result": ["c", "c"]
- }
- ]
-}, {
- "given": {
- "foo": {
- "first-1": {
- "second-1": "val"
- },
- "first-2": {
- "second-1": "val"
- },
- "first-3": {
- "second-1": "val"
- }
- }
- },
- "cases": [
- {
- "expression": "foo.*",
- "result": [{"second-1": "val"}, {"second-1": "val"},
- {"second-1": "val"}]
- },
- {
- "expression": "foo.*.*",
- "result": [["val"], ["val"], ["val"]]
- },
- {
- "expression": "foo.*.*.*",
- "result": [[], [], []]
- },
- {
- "expression": "foo.*.*.*.*",
- "result": [[], [], []]
- }
- ]
-}, {
- "given": {
- "foo": {
- "bar": "one"
- },
- "other": {
- "bar": "one"
- },
- "nomatch": {
- "notbar": "three"
- }
- },
- "cases": [
- {
- "expression": "*.bar",
- "result": ["one", "one"]
- }
- ]
-}, {
- "given": {
- "top1": {
- "sub1": {"foo": "one"}
- },
- "top2": {
- "sub1": {"foo": "one"}
- }
- },
- "cases": [
- {
- "expression": "*",
- "result": [{"sub1": {"foo": "one"}},
- {"sub1": {"foo": "one"}}]
- },
- {
- "expression": "*.sub1",
- "result": [{"foo": "one"},
- {"foo": "one"}]
- },
- {
- "expression": "*.*",
- "result": [[{"foo": "one"}],
- [{"foo": "one"}]]
- },
- {
- "expression": "*.*.foo[]",
- "result": ["one", "one"]
- },
- {
- "expression": "*.sub1.foo",
- "result": ["one", "one"]
- }
- ]
-},
-{
- "given":
- {"foo": [{"bar": "one"}, {"bar": "two"}, {"bar": "three"}, {"notbar": "four"}]},
- "cases": [
- {
- "expression": "foo[*].bar",
- "result": ["one", "two", "three"]
- },
- {
- "expression": "foo[*].notbar",
- "result": ["four"]
- }
- ]
-},
-{
- "given":
- [{"bar": "one"}, {"bar": "two"}, {"bar": "three"}, {"notbar": "four"}],
- "cases": [
- {
- "expression": "[*]",
- "result": [{"bar": "one"}, {"bar": "two"}, {"bar": "three"}, {"notbar": "four"}]
- },
- {
- "expression": "[*].bar",
- "result": ["one", "two", "three"]
- },
- {
- "expression": "[*].notbar",
- "result": ["four"]
- }
- ]
-},
-{
- "given": {
- "foo": {
- "bar": [
- {"baz": ["one", "two", "three"]},
- {"baz": ["four", "five", "six"]},
- {"baz": ["seven", "eight", "nine"]}
- ]
- }
- },
- "cases": [
- {
- "expression": "foo.bar[*].baz",
- "result": [["one", "two", "three"], ["four", "five", "six"], ["seven", "eight", "nine"]]
- },
- {
- "expression": "foo.bar[*].baz[0]",
- "result": ["one", "four", "seven"]
- },
- {
- "expression": "foo.bar[*].baz[1]",
- "result": ["two", "five", "eight"]
- },
- {
- "expression": "foo.bar[*].baz[2]",
- "result": ["three", "six", "nine"]
- },
- {
- "expression": "foo.bar[*].baz[3]",
- "result": []
- }
- ]
-},
-{
- "given": {
- "foo": {
- "bar": [["one", "two"], ["three", "four"]]
- }
- },
- "cases": [
- {
- "expression": "foo.bar[*]",
- "result": [["one", "two"], ["three", "four"]]
- },
- {
- "expression": "foo.bar[0]",
- "result": ["one", "two"]
- },
- {
- "expression": "foo.bar[0][0]",
- "result": "one"
- },
- {
- "expression": "foo.bar[0][0][0]",
- "result": null
- },
- {
- "expression": "foo.bar[0][0][0][0]",
- "result": null
- },
- {
- "expression": "foo[0][0]",
- "result": null
- }
- ]
-},
-{
- "given": {
- "foo": [
- {"bar": [{"kind": "basic"}, {"kind": "intermediate"}]},
- {"bar": [{"kind": "advanced"}, {"kind": "expert"}]},
- {"bar": "string"}
- ]
-
- },
- "cases": [
- {
- "expression": "foo[*].bar[*].kind",
- "result": [["basic", "intermediate"], ["advanced", "expert"]]
- },
- {
- "expression": "foo[*].bar[0].kind",
- "result": ["basic", "advanced"]
- }
- ]
-},
-{
- "given": {
- "foo": [
- {"bar": {"kind": "basic"}},
- {"bar": {"kind": "intermediate"}},
- {"bar": {"kind": "advanced"}},
- {"bar": {"kind": "expert"}},
- {"bar": "string"}
- ]
- },
- "cases": [
- {
- "expression": "foo[*].bar.kind",
- "result": ["basic", "intermediate", "advanced", "expert"]
- }
- ]
-},
-{
- "given": {
- "foo": [{"bar": ["one", "two"]}, {"bar": ["three", "four"]}, {"bar": ["five"]}]
- },
- "cases": [
- {
- "expression": "foo[*].bar[0]",
- "result": ["one", "three", "five"]
- },
- {
- "expression": "foo[*].bar[1]",
- "result": ["two", "four"]
- },
- {
- "expression": "foo[*].bar[2]",
- "result": []
- }
- ]
-},
-{
- "given": {
- "foo": [{"bar": []}, {"bar": []}, {"bar": []}]
- },
- "cases": [
- {
- "expression": "foo[*].bar[0]",
- "result": []
- }
- ]
-},
-{
- "given": {
- "foo": [["one", "two"], ["three", "four"], ["five"]]
- },
- "cases": [
- {
- "expression": "foo[*][0]",
- "result": ["one", "three", "five"]
- },
- {
- "expression": "foo[*][1]",
- "result": ["two", "four"]
- }
- ]
-},
-{
- "given": {
- "foo": [
- [
- ["one", "two"], ["three", "four"]
- ], [
- ["five", "six"], ["seven", "eight"]
- ], [
- ["nine"], ["ten"]
- ]
- ]
- },
- "cases": [
- {
- "expression": "foo[*][0]",
- "result": [["one", "two"], ["five", "six"], ["nine"]]
- },
- {
- "expression": "foo[*][1]",
- "result": [["three", "four"], ["seven", "eight"], ["ten"]]
- },
- {
- "expression": "foo[*][0][0]",
- "result": ["one", "five", "nine"]
- },
- {
- "expression": "foo[*][1][0]",
- "result": ["three", "seven", "ten"]
- },
- {
- "expression": "foo[*][0][1]",
- "result": ["two", "six"]
- },
- {
- "expression": "foo[*][1][1]",
- "result": ["four", "eight"]
- },
- {
- "expression": "foo[*][2]",
- "result": []
- },
- {
- "expression": "foo[*][2][2]",
- "result": []
- },
- {
- "expression": "bar[*]",
- "result": null
- },
- {
- "expression": "bar[*].baz[*]",
- "result": null
- }
- ]
-},
-{
- "given": {
- "string": "string",
- "hash": {"foo": "bar", "bar": "baz"},
- "number": 23,
- "nullvalue": null
- },
- "cases": [
- {
- "expression": "string[*]",
- "result": null
- },
- {
- "expression": "hash[*]",
- "result": null
- },
- {
- "expression": "number[*]",
- "result": null
- },
- {
- "expression": "nullvalue[*]",
- "result": null
- },
- {
- "expression": "string[*].foo",
- "result": null
- },
- {
- "expression": "hash[*].foo",
- "result": null
- },
- {
- "expression": "number[*].foo",
- "result": null
- },
- {
- "expression": "nullvalue[*].foo",
- "result": null
- },
- {
- "expression": "nullvalue[*].foo[*].bar",
- "result": null
- }
- ]
-},
-{
- "given": {
- "string": "string",
- "hash": {"foo": "val", "bar": "val"},
- "number": 23,
- "array": [1, 2, 3],
- "nullvalue": null
- },
- "cases": [
- {
- "expression": "string.*",
- "result": null
- },
- {
- "expression": "hash.*",
- "result": ["val", "val"]
- },
- {
- "expression": "number.*",
- "result": null
- },
- {
- "expression": "array.*",
- "result": null
- },
- {
- "expression": "nullvalue.*",
- "result": null
- }
- ]
-},
-{
- "given": {
- "a": [0, 1, 2],
- "b": [0, 1, 2]
- },
- "cases": [
- {
- "expression": "*[0]",
- "result": [0, 0]
- }
- ]
-}
-]
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance_test.go
deleted file mode 100644
index 4ee9c959..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/compliance_test.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package jmespath
-
-import (
- "encoding/json"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-type TestSuite struct {
- Given interface{}
- TestCases []TestCase `json:"cases"`
- Comment string
-}
-type TestCase struct {
- Comment string
- Expression string
- Result interface{}
- Error string
-}
-
-var whiteListed = []string{
- "compliance/basic.json",
- "compliance/current.json",
- "compliance/escape.json",
- "compliance/filters.json",
- "compliance/functions.json",
- "compliance/identifiers.json",
- "compliance/indices.json",
- "compliance/literal.json",
- "compliance/multiselect.json",
- "compliance/ormatch.json",
- "compliance/pipe.json",
- "compliance/slice.json",
- "compliance/syntax.json",
- "compliance/unicode.json",
- "compliance/wildcard.json",
- "compliance/boolean.json",
-}
-
-func allowed(path string) bool {
- for _, el := range whiteListed {
- if el == path {
- return true
- }
- }
- return false
-}
-
-func TestCompliance(t *testing.T) {
- assert := assert.New(t)
-
- var complianceFiles []string
- err := filepath.Walk("compliance", func(path string, _ os.FileInfo, _ error) error {
- //if strings.HasSuffix(path, ".json") {
- if allowed(path) {
- complianceFiles = append(complianceFiles, path)
- }
- return nil
- })
- if assert.Nil(err) {
- for _, filename := range complianceFiles {
- runComplianceTest(assert, filename)
- }
- }
-}
-
-func runComplianceTest(assert *assert.Assertions, filename string) {
- var testSuites []TestSuite
- data, err := ioutil.ReadFile(filename)
- if assert.Nil(err) {
- err := json.Unmarshal(data, &testSuites)
- if assert.Nil(err) {
- for _, testsuite := range testSuites {
- runTestSuite(assert, testsuite, filename)
- }
- }
- }
-}
-
-func runTestSuite(assert *assert.Assertions, testsuite TestSuite, filename string) {
- for _, testcase := range testsuite.TestCases {
- if testcase.Error != "" {
- // This is a test case that verifies we error out properly.
- runSyntaxTestCase(assert, testsuite.Given, testcase, filename)
- } else {
- runTestCase(assert, testsuite.Given, testcase, filename)
- }
- }
-}
-
-func runSyntaxTestCase(assert *assert.Assertions, given interface{}, testcase TestCase, filename string) {
- // Anything with an .Error means that we expect that JMESPath should return
- // an error when we try to evaluate the expression.
- _, err := Search(testcase.Expression, given)
- assert.NotNil(err, fmt.Sprintf("Expression: %s", testcase.Expression))
-}
-
-func runTestCase(assert *assert.Assertions, given interface{}, testcase TestCase, filename string) {
- lexer := NewLexer()
- var err error
- _, err = lexer.tokenize(testcase.Expression)
- if err != nil {
- errMsg := fmt.Sprintf("(%s) Could not lex expression: %s -- %s", filename, testcase.Expression, err.Error())
- assert.Fail(errMsg)
- return
- }
- parser := NewParser()
- _, err = parser.Parse(testcase.Expression)
- if err != nil {
- errMsg := fmt.Sprintf("(%s) Could not parse expression: %s -- %s", filename, testcase.Expression, err.Error())
- assert.Fail(errMsg)
- return
- }
- actual, err := Search(testcase.Expression, given)
- if assert.Nil(err, fmt.Sprintf("Expression: %s", testcase.Expression)) {
- assert.Equal(testcase.Result, actual, fmt.Sprintf("Expression: %s", testcase.Expression))
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/functions.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/functions.go
deleted file mode 100644
index 9b7cd89b..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/functions.go
+++ /dev/null
@@ -1,842 +0,0 @@
-package jmespath
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "math"
- "reflect"
- "sort"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-type jpFunction func(arguments []interface{}) (interface{}, error)
-
-type jpType string
-
-const (
- jpUnknown jpType = "unknown"
- jpNumber jpType = "number"
- jpString jpType = "string"
- jpArray jpType = "array"
- jpObject jpType = "object"
- jpArrayNumber jpType = "array[number]"
- jpArrayString jpType = "array[string]"
- jpExpref jpType = "expref"
- jpAny jpType = "any"
-)
-
-type functionEntry struct {
- name string
- arguments []argSpec
- handler jpFunction
- hasExpRef bool
-}
-
-type argSpec struct {
- types []jpType
- variadic bool
-}
-
-type byExprString struct {
- intr *treeInterpreter
- node ASTNode
- items []interface{}
- hasError bool
-}
-
-func (a *byExprString) Len() int {
- return len(a.items)
-}
-func (a *byExprString) Swap(i, j int) {
- a.items[i], a.items[j] = a.items[j], a.items[i]
-}
-func (a *byExprString) Less(i, j int) bool {
- first, err := a.intr.Execute(a.node, a.items[i])
- if err != nil {
- a.hasError = true
- // Return a dummy value.
- return true
- }
- ith, ok := first.(string)
- if !ok {
- a.hasError = true
- return true
- }
- second, err := a.intr.Execute(a.node, a.items[j])
- if err != nil {
- a.hasError = true
- // Return a dummy value.
- return true
- }
- jth, ok := second.(string)
- if !ok {
- a.hasError = true
- return true
- }
- return ith < jth
-}
-
-type byExprFloat struct {
- intr *treeInterpreter
- node ASTNode
- items []interface{}
- hasError bool
-}
-
-func (a *byExprFloat) Len() int {
- return len(a.items)
-}
-func (a *byExprFloat) Swap(i, j int) {
- a.items[i], a.items[j] = a.items[j], a.items[i]
-}
-func (a *byExprFloat) Less(i, j int) bool {
- first, err := a.intr.Execute(a.node, a.items[i])
- if err != nil {
- a.hasError = true
- // Return a dummy value.
- return true
- }
- ith, ok := first.(float64)
- if !ok {
- a.hasError = true
- return true
- }
- second, err := a.intr.Execute(a.node, a.items[j])
- if err != nil {
- a.hasError = true
- // Return a dummy value.
- return true
- }
- jth, ok := second.(float64)
- if !ok {
- a.hasError = true
- return true
- }
- return ith < jth
-}
-
-type functionCaller struct {
- functionTable map[string]functionEntry
-}
-
-func newFunctionCaller() *functionCaller {
- caller := &functionCaller{}
- caller.functionTable = map[string]functionEntry{
- "length": {
- name: "length",
- arguments: []argSpec{
- {types: []jpType{jpString, jpArray, jpObject}},
- },
- handler: jpfLength,
- },
- "starts_with": {
- name: "starts_with",
- arguments: []argSpec{
- {types: []jpType{jpString}},
- {types: []jpType{jpString}},
- },
- handler: jpfStartsWith,
- },
- "abs": {
- name: "abs",
- arguments: []argSpec{
- {types: []jpType{jpNumber}},
- },
- handler: jpfAbs,
- },
- "avg": {
- name: "avg",
- arguments: []argSpec{
- {types: []jpType{jpArrayNumber}},
- },
- handler: jpfAvg,
- },
- "ceil": {
- name: "ceil",
- arguments: []argSpec{
- {types: []jpType{jpNumber}},
- },
- handler: jpfCeil,
- },
- "contains": {
- name: "contains",
- arguments: []argSpec{
- {types: []jpType{jpArray, jpString}},
- {types: []jpType{jpAny}},
- },
- handler: jpfContains,
- },
- "ends_with": {
- name: "ends_with",
- arguments: []argSpec{
- {types: []jpType{jpString}},
- {types: []jpType{jpString}},
- },
- handler: jpfEndsWith,
- },
- "floor": {
- name: "floor",
- arguments: []argSpec{
- {types: []jpType{jpNumber}},
- },
- handler: jpfFloor,
- },
- "map": {
- name: "amp",
- arguments: []argSpec{
- {types: []jpType{jpExpref}},
- {types: []jpType{jpArray}},
- },
- handler: jpfMap,
- hasExpRef: true,
- },
- "max": {
- name: "max",
- arguments: []argSpec{
- {types: []jpType{jpArrayNumber, jpArrayString}},
- },
- handler: jpfMax,
- },
- "merge": {
- name: "merge",
- arguments: []argSpec{
- {types: []jpType{jpObject}, variadic: true},
- },
- handler: jpfMerge,
- },
- "max_by": {
- name: "max_by",
- arguments: []argSpec{
- {types: []jpType{jpArray}},
- {types: []jpType{jpExpref}},
- },
- handler: jpfMaxBy,
- hasExpRef: true,
- },
- "sum": {
- name: "sum",
- arguments: []argSpec{
- {types: []jpType{jpArrayNumber}},
- },
- handler: jpfSum,
- },
- "min": {
- name: "min",
- arguments: []argSpec{
- {types: []jpType{jpArrayNumber, jpArrayString}},
- },
- handler: jpfMin,
- },
- "min_by": {
- name: "min_by",
- arguments: []argSpec{
- {types: []jpType{jpArray}},
- {types: []jpType{jpExpref}},
- },
- handler: jpfMinBy,
- hasExpRef: true,
- },
- "type": {
- name: "type",
- arguments: []argSpec{
- {types: []jpType{jpAny}},
- },
- handler: jpfType,
- },
- "keys": {
- name: "keys",
- arguments: []argSpec{
- {types: []jpType{jpObject}},
- },
- handler: jpfKeys,
- },
- "values": {
- name: "values",
- arguments: []argSpec{
- {types: []jpType{jpObject}},
- },
- handler: jpfValues,
- },
- "sort": {
- name: "sort",
- arguments: []argSpec{
- {types: []jpType{jpArrayString, jpArrayNumber}},
- },
- handler: jpfSort,
- },
- "sort_by": {
- name: "sort_by",
- arguments: []argSpec{
- {types: []jpType{jpArray}},
- {types: []jpType{jpExpref}},
- },
- handler: jpfSortBy,
- hasExpRef: true,
- },
- "join": {
- name: "join",
- arguments: []argSpec{
- {types: []jpType{jpString}},
- {types: []jpType{jpArrayString}},
- },
- handler: jpfJoin,
- },
- "reverse": {
- name: "reverse",
- arguments: []argSpec{
- {types: []jpType{jpArray, jpString}},
- },
- handler: jpfReverse,
- },
- "to_array": {
- name: "to_array",
- arguments: []argSpec{
- {types: []jpType{jpAny}},
- },
- handler: jpfToArray,
- },
- "to_string": {
- name: "to_string",
- arguments: []argSpec{
- {types: []jpType{jpAny}},
- },
- handler: jpfToString,
- },
- "to_number": {
- name: "to_number",
- arguments: []argSpec{
- {types: []jpType{jpAny}},
- },
- handler: jpfToNumber,
- },
- "not_null": {
- name: "not_null",
- arguments: []argSpec{
- {types: []jpType{jpAny}, variadic: true},
- },
- handler: jpfNotNull,
- },
- }
- return caller
-}
-
-func (e *functionEntry) resolveArgs(arguments []interface{}) ([]interface{}, error) {
- if len(e.arguments) == 0 {
- return arguments, nil
- }
- if !e.arguments[len(e.arguments)-1].variadic {
- if len(e.arguments) != len(arguments) {
- return nil, errors.New("incorrect number of args")
- }
- for i, spec := range e.arguments {
- userArg := arguments[i]
- err := spec.typeCheck(userArg)
- if err != nil {
- return nil, err
- }
- }
- return arguments, nil
- }
- if len(arguments) < len(e.arguments) {
- return nil, errors.New("Invalid arity.")
- }
- return arguments, nil
-}
-
-func (a *argSpec) typeCheck(arg interface{}) error {
- for _, t := range a.types {
- switch t {
- case jpNumber:
- if _, ok := arg.(float64); ok {
- return nil
- }
- case jpString:
- if _, ok := arg.(string); ok {
- return nil
- }
- case jpArray:
- if isSliceType(arg) {
- return nil
- }
- case jpObject:
- if _, ok := arg.(map[string]interface{}); ok {
- return nil
- }
- case jpArrayNumber:
- if _, ok := toArrayNum(arg); ok {
- return nil
- }
- case jpArrayString:
- if _, ok := toArrayStr(arg); ok {
- return nil
- }
- case jpAny:
- return nil
- case jpExpref:
- if _, ok := arg.(expRef); ok {
- return nil
- }
- }
- }
- return fmt.Errorf("Invalid type for: %v, expected: %#v", arg, a.types)
-}
-
-func (f *functionCaller) CallFunction(name string, arguments []interface{}, intr *treeInterpreter) (interface{}, error) {
- entry, ok := f.functionTable[name]
- if !ok {
- return nil, errors.New("unknown function: " + name)
- }
- resolvedArgs, err := entry.resolveArgs(arguments)
- if err != nil {
- return nil, err
- }
- if entry.hasExpRef {
- var extra []interface{}
- extra = append(extra, intr)
- resolvedArgs = append(extra, resolvedArgs...)
- }
- return entry.handler(resolvedArgs)
-}
-
-func jpfAbs(arguments []interface{}) (interface{}, error) {
- num := arguments[0].(float64)
- return math.Abs(num), nil
-}
-
-func jpfLength(arguments []interface{}) (interface{}, error) {
- arg := arguments[0]
- if c, ok := arg.(string); ok {
- return float64(utf8.RuneCountInString(c)), nil
- } else if isSliceType(arg) {
- v := reflect.ValueOf(arg)
- return float64(v.Len()), nil
- } else if c, ok := arg.(map[string]interface{}); ok {
- return float64(len(c)), nil
- }
- return nil, errors.New("could not compute length()")
-}
-
-func jpfStartsWith(arguments []interface{}) (interface{}, error) {
- search := arguments[0].(string)
- prefix := arguments[1].(string)
- return strings.HasPrefix(search, prefix), nil
-}
-
-func jpfAvg(arguments []interface{}) (interface{}, error) {
- // We've already type checked the value so we can safely use
- // type assertions.
- args := arguments[0].([]interface{})
- length := float64(len(args))
- numerator := 0.0
- for _, n := range args {
- numerator += n.(float64)
- }
- return numerator / length, nil
-}
-func jpfCeil(arguments []interface{}) (interface{}, error) {
- val := arguments[0].(float64)
- return math.Ceil(val), nil
-}
-func jpfContains(arguments []interface{}) (interface{}, error) {
- search := arguments[0]
- el := arguments[1]
- if searchStr, ok := search.(string); ok {
- if elStr, ok := el.(string); ok {
- return strings.Index(searchStr, elStr) != -1, nil
- }
- return false, nil
- }
- // Otherwise this is a generic contains for []interface{}
- general := search.([]interface{})
- for _, item := range general {
- if item == el {
- return true, nil
- }
- }
- return false, nil
-}
-func jpfEndsWith(arguments []interface{}) (interface{}, error) {
- search := arguments[0].(string)
- suffix := arguments[1].(string)
- return strings.HasSuffix(search, suffix), nil
-}
-func jpfFloor(arguments []interface{}) (interface{}, error) {
- val := arguments[0].(float64)
- return math.Floor(val), nil
-}
-func jpfMap(arguments []interface{}) (interface{}, error) {
- intr := arguments[0].(*treeInterpreter)
- exp := arguments[1].(expRef)
- node := exp.ref
- arr := arguments[2].([]interface{})
- mapped := make([]interface{}, 0, len(arr))
- for _, value := range arr {
- current, err := intr.Execute(node, value)
- if err != nil {
- return nil, err
- }
- mapped = append(mapped, current)
- }
- return mapped, nil
-}
-func jpfMax(arguments []interface{}) (interface{}, error) {
- if items, ok := toArrayNum(arguments[0]); ok {
- if len(items) == 0 {
- return nil, nil
- }
- if len(items) == 1 {
- return items[0], nil
- }
- best := items[0]
- for _, item := range items[1:] {
- if item > best {
- best = item
- }
- }
- return best, nil
- }
- // Otherwise we're dealing with a max() of strings.
- items, _ := toArrayStr(arguments[0])
- if len(items) == 0 {
- return nil, nil
- }
- if len(items) == 1 {
- return items[0], nil
- }
- best := items[0]
- for _, item := range items[1:] {
- if item > best {
- best = item
- }
- }
- return best, nil
-}
-func jpfMerge(arguments []interface{}) (interface{}, error) {
- final := make(map[string]interface{})
- for _, m := range arguments {
- mapped := m.(map[string]interface{})
- for key, value := range mapped {
- final[key] = value
- }
- }
- return final, nil
-}
-func jpfMaxBy(arguments []interface{}) (interface{}, error) {
- intr := arguments[0].(*treeInterpreter)
- arr := arguments[1].([]interface{})
- exp := arguments[2].(expRef)
- node := exp.ref
- if len(arr) == 0 {
- return nil, nil
- } else if len(arr) == 1 {
- return arr[0], nil
- }
- start, err := intr.Execute(node, arr[0])
- if err != nil {
- return nil, err
- }
- switch t := start.(type) {
- case float64:
- bestVal := t
- bestItem := arr[0]
- for _, item := range arr[1:] {
- result, err := intr.Execute(node, item)
- if err != nil {
- return nil, err
- }
- current, ok := result.(float64)
- if !ok {
- return nil, errors.New("invalid type, must be number")
- }
- if current > bestVal {
- bestVal = current
- bestItem = item
- }
- }
- return bestItem, nil
- case string:
- bestVal := t
- bestItem := arr[0]
- for _, item := range arr[1:] {
- result, err := intr.Execute(node, item)
- if err != nil {
- return nil, err
- }
- current, ok := result.(string)
- if !ok {
- return nil, errors.New("invalid type, must be string")
- }
- if current > bestVal {
- bestVal = current
- bestItem = item
- }
- }
- return bestItem, nil
- default:
- return nil, errors.New("invalid type, must be number of string")
- }
-}
-func jpfSum(arguments []interface{}) (interface{}, error) {
- items, _ := toArrayNum(arguments[0])
- sum := 0.0
- for _, item := range items {
- sum += item
- }
- return sum, nil
-}
-
-func jpfMin(arguments []interface{}) (interface{}, error) {
- if items, ok := toArrayNum(arguments[0]); ok {
- if len(items) == 0 {
- return nil, nil
- }
- if len(items) == 1 {
- return items[0], nil
- }
- best := items[0]
- for _, item := range items[1:] {
- if item < best {
- best = item
- }
- }
- return best, nil
- }
- items, _ := toArrayStr(arguments[0])
- if len(items) == 0 {
- return nil, nil
- }
- if len(items) == 1 {
- return items[0], nil
- }
- best := items[0]
- for _, item := range items[1:] {
- if item < best {
- best = item
- }
- }
- return best, nil
-}
-
-func jpfMinBy(arguments []interface{}) (interface{}, error) {
- intr := arguments[0].(*treeInterpreter)
- arr := arguments[1].([]interface{})
- exp := arguments[2].(expRef)
- node := exp.ref
- if len(arr) == 0 {
- return nil, nil
- } else if len(arr) == 1 {
- return arr[0], nil
- }
- start, err := intr.Execute(node, arr[0])
- if err != nil {
- return nil, err
- }
- if t, ok := start.(float64); ok {
- bestVal := t
- bestItem := arr[0]
- for _, item := range arr[1:] {
- result, err := intr.Execute(node, item)
- if err != nil {
- return nil, err
- }
- current, ok := result.(float64)
- if !ok {
- return nil, errors.New("invalid type, must be number")
- }
- if current < bestVal {
- bestVal = current
- bestItem = item
- }
- }
- return bestItem, nil
- } else if t, ok := start.(string); ok {
- bestVal := t
- bestItem := arr[0]
- for _, item := range arr[1:] {
- result, err := intr.Execute(node, item)
- if err != nil {
- return nil, err
- }
- current, ok := result.(string)
- if !ok {
- return nil, errors.New("invalid type, must be string")
- }
- if current < bestVal {
- bestVal = current
- bestItem = item
- }
- }
- return bestItem, nil
- } else {
- return nil, errors.New("invalid type, must be number of string")
- }
-}
-func jpfType(arguments []interface{}) (interface{}, error) {
- arg := arguments[0]
- if _, ok := arg.(float64); ok {
- return "number", nil
- }
- if _, ok := arg.(string); ok {
- return "string", nil
- }
- if _, ok := arg.([]interface{}); ok {
- return "array", nil
- }
- if _, ok := arg.(map[string]interface{}); ok {
- return "object", nil
- }
- if arg == nil {
- return "null", nil
- }
- if arg == true || arg == false {
- return "boolean", nil
- }
- return nil, errors.New("unknown type")
-}
-func jpfKeys(arguments []interface{}) (interface{}, error) {
- arg := arguments[0].(map[string]interface{})
- collected := make([]interface{}, 0, len(arg))
- for key := range arg {
- collected = append(collected, key)
- }
- return collected, nil
-}
-func jpfValues(arguments []interface{}) (interface{}, error) {
- arg := arguments[0].(map[string]interface{})
- collected := make([]interface{}, 0, len(arg))
- for _, value := range arg {
- collected = append(collected, value)
- }
- return collected, nil
-}
-func jpfSort(arguments []interface{}) (interface{}, error) {
- if items, ok := toArrayNum(arguments[0]); ok {
- d := sort.Float64Slice(items)
- sort.Stable(d)
- final := make([]interface{}, len(d))
- for i, val := range d {
- final[i] = val
- }
- return final, nil
- }
- // Otherwise we're dealing with sort()'ing strings.
- items, _ := toArrayStr(arguments[0])
- d := sort.StringSlice(items)
- sort.Stable(d)
- final := make([]interface{}, len(d))
- for i, val := range d {
- final[i] = val
- }
- return final, nil
-}
-func jpfSortBy(arguments []interface{}) (interface{}, error) {
- intr := arguments[0].(*treeInterpreter)
- arr := arguments[1].([]interface{})
- exp := arguments[2].(expRef)
- node := exp.ref
- if len(arr) == 0 {
- return arr, nil
- } else if len(arr) == 1 {
- return arr, nil
- }
- start, err := intr.Execute(node, arr[0])
- if err != nil {
- return nil, err
- }
- if _, ok := start.(float64); ok {
- sortable := &byExprFloat{intr, node, arr, false}
- sort.Stable(sortable)
- if sortable.hasError {
- return nil, errors.New("error in sort_by comparison")
- }
- return arr, nil
- } else if _, ok := start.(string); ok {
- sortable := &byExprString{intr, node, arr, false}
- sort.Stable(sortable)
- if sortable.hasError {
- return nil, errors.New("error in sort_by comparison")
- }
- return arr, nil
- } else {
- return nil, errors.New("invalid type, must be number of string")
- }
-}
-func jpfJoin(arguments []interface{}) (interface{}, error) {
- sep := arguments[0].(string)
- // We can't just do arguments[1].([]string), we have to
- // manually convert each item to a string.
- arrayStr := []string{}
- for _, item := range arguments[1].([]interface{}) {
- arrayStr = append(arrayStr, item.(string))
- }
- return strings.Join(arrayStr, sep), nil
-}
-func jpfReverse(arguments []interface{}) (interface{}, error) {
- if s, ok := arguments[0].(string); ok {
- r := []rune(s)
- for i, j := 0, len(r)-1; i < len(r)/2; i, j = i+1, j-1 {
- r[i], r[j] = r[j], r[i]
- }
- return string(r), nil
- }
- items := arguments[0].([]interface{})
- length := len(items)
- reversed := make([]interface{}, length)
- for i, item := range items {
- reversed[length-(i+1)] = item
- }
- return reversed, nil
-}
-func jpfToArray(arguments []interface{}) (interface{}, error) {
- if _, ok := arguments[0].([]interface{}); ok {
- return arguments[0], nil
- }
- return arguments[:1:1], nil
-}
-func jpfToString(arguments []interface{}) (interface{}, error) {
- if v, ok := arguments[0].(string); ok {
- return v, nil
- }
- result, err := json.Marshal(arguments[0])
- if err != nil {
- return nil, err
- }
- return string(result), nil
-}
-func jpfToNumber(arguments []interface{}) (interface{}, error) {
- arg := arguments[0]
- if v, ok := arg.(float64); ok {
- return v, nil
- }
- if v, ok := arg.(string); ok {
- conv, err := strconv.ParseFloat(v, 64)
- if err != nil {
- return nil, nil
- }
- return conv, nil
- }
- if _, ok := arg.([]interface{}); ok {
- return nil, nil
- }
- if _, ok := arg.(map[string]interface{}); ok {
- return nil, nil
- }
- if arg == nil {
- return nil, nil
- }
- if arg == true || arg == false {
- return nil, nil
- }
- return nil, errors.New("unknown type")
-}
-func jpfNotNull(arguments []interface{}) (interface{}, error) {
- for _, arg := range arguments {
- if arg != nil {
- return arg, nil
- }
- }
- return nil, nil
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/fuzz/jmespath.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/fuzz/jmespath.go
deleted file mode 100644
index c7df0878..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/fuzz/jmespath.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package jmespath
-
-import "github.com/jmespath/go-jmespath"
-
-// Fuzz will fuzz test the JMESPath parser.
-func Fuzz(data []byte) int {
- p := jmespath.NewParser()
- _, err := p.Parse(string(data))
- if err != nil {
- return 1
- }
- return 0
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter.go
deleted file mode 100644
index 13c74604..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter.go
+++ /dev/null
@@ -1,418 +0,0 @@
-package jmespath
-
-import (
- "errors"
- "reflect"
- "unicode"
- "unicode/utf8"
-)
-
-/* This is a tree based interpreter. It walks the AST and directly
- interprets the AST to search through a JSON document.
-*/
-
-type treeInterpreter struct {
- fCall *functionCaller
-}
-
-func newInterpreter() *treeInterpreter {
- interpreter := treeInterpreter{}
- interpreter.fCall = newFunctionCaller()
- return &interpreter
-}
-
-type expRef struct {
- ref ASTNode
-}
-
-// Execute takes an ASTNode and input data and interprets the AST directly.
-// It will produce the result of applying the JMESPath expression associated
-// with the ASTNode to the input data "value".
-func (intr *treeInterpreter) Execute(node ASTNode, value interface{}) (interface{}, error) {
- switch node.nodeType {
- case ASTComparator:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- right, err := intr.Execute(node.children[1], value)
- if err != nil {
- return nil, err
- }
- switch node.value {
- case tEQ:
- return objsEqual(left, right), nil
- case tNE:
- return !objsEqual(left, right), nil
- }
- leftNum, ok := left.(float64)
- if !ok {
- return nil, nil
- }
- rightNum, ok := right.(float64)
- if !ok {
- return nil, nil
- }
- switch node.value {
- case tGT:
- return leftNum > rightNum, nil
- case tGTE:
- return leftNum >= rightNum, nil
- case tLT:
- return leftNum < rightNum, nil
- case tLTE:
- return leftNum <= rightNum, nil
- }
- case ASTExpRef:
- return expRef{ref: node.children[0]}, nil
- case ASTFunctionExpression:
- resolvedArgs := []interface{}{}
- for _, arg := range node.children {
- current, err := intr.Execute(arg, value)
- if err != nil {
- return nil, err
- }
- resolvedArgs = append(resolvedArgs, current)
- }
- return intr.fCall.CallFunction(node.value.(string), resolvedArgs, intr)
- case ASTField:
- if m, ok := value.(map[string]interface{}); ok {
- key := node.value.(string)
- return m[key], nil
- }
- return intr.fieldFromStruct(node.value.(string), value)
- case ASTFilterProjection:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, nil
- }
- sliceType, ok := left.([]interface{})
- if !ok {
- if isSliceType(left) {
- return intr.filterProjectionWithReflection(node, left)
- }
- return nil, nil
- }
- compareNode := node.children[2]
- collected := []interface{}{}
- for _, element := range sliceType {
- result, err := intr.Execute(compareNode, element)
- if err != nil {
- return nil, err
- }
- if !isFalse(result) {
- current, err := intr.Execute(node.children[1], element)
- if err != nil {
- return nil, err
- }
- if current != nil {
- collected = append(collected, current)
- }
- }
- }
- return collected, nil
- case ASTFlatten:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, nil
- }
- sliceType, ok := left.([]interface{})
- if !ok {
- // If we can't type convert to []interface{}, there's
- // a chance this could still work via reflection if we're
- // dealing with user provided types.
- if isSliceType(left) {
- return intr.flattenWithReflection(left)
- }
- return nil, nil
- }
- flattened := []interface{}{}
- for _, element := range sliceType {
- if elementSlice, ok := element.([]interface{}); ok {
- flattened = append(flattened, elementSlice...)
- } else if isSliceType(element) {
- reflectFlat := []interface{}{}
- v := reflect.ValueOf(element)
- for i := 0; i < v.Len(); i++ {
- reflectFlat = append(reflectFlat, v.Index(i).Interface())
- }
- flattened = append(flattened, reflectFlat...)
- } else {
- flattened = append(flattened, element)
- }
- }
- return flattened, nil
- case ASTIdentity, ASTCurrentNode:
- return value, nil
- case ASTIndex:
- if sliceType, ok := value.([]interface{}); ok {
- index := node.value.(int)
- if index < 0 {
- index += len(sliceType)
- }
- if index < len(sliceType) && index >= 0 {
- return sliceType[index], nil
- }
- return nil, nil
- }
- // Otherwise try via reflection.
- rv := reflect.ValueOf(value)
- if rv.Kind() == reflect.Slice {
- index := node.value.(int)
- if index < 0 {
- index += rv.Len()
- }
- if index < rv.Len() && index >= 0 {
- v := rv.Index(index)
- return v.Interface(), nil
- }
- }
- return nil, nil
- case ASTKeyValPair:
- return intr.Execute(node.children[0], value)
- case ASTLiteral:
- return node.value, nil
- case ASTMultiSelectHash:
- if value == nil {
- return nil, nil
- }
- collected := make(map[string]interface{})
- for _, child := range node.children {
- current, err := intr.Execute(child, value)
- if err != nil {
- return nil, err
- }
- key := child.value.(string)
- collected[key] = current
- }
- return collected, nil
- case ASTMultiSelectList:
- if value == nil {
- return nil, nil
- }
- collected := []interface{}{}
- for _, child := range node.children {
- current, err := intr.Execute(child, value)
- if err != nil {
- return nil, err
- }
- collected = append(collected, current)
- }
- return collected, nil
- case ASTOrExpression:
- matched, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- if isFalse(matched) {
- matched, err = intr.Execute(node.children[1], value)
- if err != nil {
- return nil, err
- }
- }
- return matched, nil
- case ASTAndExpression:
- matched, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- if isFalse(matched) {
- return matched, nil
- }
- return intr.Execute(node.children[1], value)
- case ASTNotExpression:
- matched, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- if isFalse(matched) {
- return true, nil
- }
- return false, nil
- case ASTPipe:
- result := value
- var err error
- for _, child := range node.children {
- result, err = intr.Execute(child, result)
- if err != nil {
- return nil, err
- }
- }
- return result, nil
- case ASTProjection:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- sliceType, ok := left.([]interface{})
- if !ok {
- if isSliceType(left) {
- return intr.projectWithReflection(node, left)
- }
- return nil, nil
- }
- collected := []interface{}{}
- var current interface{}
- for _, element := range sliceType {
- current, err = intr.Execute(node.children[1], element)
- if err != nil {
- return nil, err
- }
- if current != nil {
- collected = append(collected, current)
- }
- }
- return collected, nil
- case ASTSubexpression, ASTIndexExpression:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, err
- }
- return intr.Execute(node.children[1], left)
- case ASTSlice:
- sliceType, ok := value.([]interface{})
- if !ok {
- if isSliceType(value) {
- return intr.sliceWithReflection(node, value)
- }
- return nil, nil
- }
- parts := node.value.([]*int)
- sliceParams := make([]sliceParam, 3)
- for i, part := range parts {
- if part != nil {
- sliceParams[i].Specified = true
- sliceParams[i].N = *part
- }
- }
- return slice(sliceType, sliceParams)
- case ASTValueProjection:
- left, err := intr.Execute(node.children[0], value)
- if err != nil {
- return nil, nil
- }
- mapType, ok := left.(map[string]interface{})
- if !ok {
- return nil, nil
- }
- values := make([]interface{}, len(mapType))
- for _, value := range mapType {
- values = append(values, value)
- }
- collected := []interface{}{}
- for _, element := range values {
- current, err := intr.Execute(node.children[1], element)
- if err != nil {
- return nil, err
- }
- if current != nil {
- collected = append(collected, current)
- }
- }
- return collected, nil
- }
- return nil, errors.New("Unknown AST node: " + node.nodeType.String())
-}
-
-func (intr *treeInterpreter) fieldFromStruct(key string, value interface{}) (interface{}, error) {
- rv := reflect.ValueOf(value)
- first, n := utf8.DecodeRuneInString(key)
- fieldName := string(unicode.ToUpper(first)) + key[n:]
- if rv.Kind() == reflect.Struct {
- v := rv.FieldByName(fieldName)
- if !v.IsValid() {
- return nil, nil
- }
- return v.Interface(), nil
- } else if rv.Kind() == reflect.Ptr {
- // Handle multiple levels of indirection?
- if rv.IsNil() {
- return nil, nil
- }
- rv = rv.Elem()
- v := rv.FieldByName(fieldName)
- if !v.IsValid() {
- return nil, nil
- }
- return v.Interface(), nil
- }
- return nil, nil
-}
-
-func (intr *treeInterpreter) flattenWithReflection(value interface{}) (interface{}, error) {
- v := reflect.ValueOf(value)
- flattened := []interface{}{}
- for i := 0; i < v.Len(); i++ {
- element := v.Index(i).Interface()
- if reflect.TypeOf(element).Kind() == reflect.Slice {
- // Then insert the contents of the element
- // slice into the flattened slice,
- // i.e flattened = append(flattened, mySlice...)
- elementV := reflect.ValueOf(element)
- for j := 0; j < elementV.Len(); j++ {
- flattened = append(
- flattened, elementV.Index(j).Interface())
- }
- } else {
- flattened = append(flattened, element)
- }
- }
- return flattened, nil
-}
-
-func (intr *treeInterpreter) sliceWithReflection(node ASTNode, value interface{}) (interface{}, error) {
- v := reflect.ValueOf(value)
- parts := node.value.([]*int)
- sliceParams := make([]sliceParam, 3)
- for i, part := range parts {
- if part != nil {
- sliceParams[i].Specified = true
- sliceParams[i].N = *part
- }
- }
- final := []interface{}{}
- for i := 0; i < v.Len(); i++ {
- element := v.Index(i).Interface()
- final = append(final, element)
- }
- return slice(final, sliceParams)
-}
-
-func (intr *treeInterpreter) filterProjectionWithReflection(node ASTNode, value interface{}) (interface{}, error) {
- compareNode := node.children[2]
- collected := []interface{}{}
- v := reflect.ValueOf(value)
- for i := 0; i < v.Len(); i++ {
- element := v.Index(i).Interface()
- result, err := intr.Execute(compareNode, element)
- if err != nil {
- return nil, err
- }
- if !isFalse(result) {
- current, err := intr.Execute(node.children[1], element)
- if err != nil {
- return nil, err
- }
- if current != nil {
- collected = append(collected, current)
- }
- }
- }
- return collected, nil
-}
-
-func (intr *treeInterpreter) projectWithReflection(node ASTNode, value interface{}) (interface{}, error) {
- collected := []interface{}{}
- v := reflect.ValueOf(value)
- for i := 0; i < v.Len(); i++ {
- element := v.Index(i).Interface()
- result, err := intr.Execute(node.children[1], element)
- if err != nil {
- return nil, err
- }
- if result != nil {
- collected = append(collected, result)
- }
- }
- return collected, nil
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter_test.go
deleted file mode 100644
index 11c6d0aa..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/interpreter_test.go
+++ /dev/null
@@ -1,221 +0,0 @@
-package jmespath
-
-import (
- "encoding/json"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-type scalars struct {
- Foo string
- Bar string
-}
-
-type sliceType struct {
- A string
- B []scalars
- C []*scalars
-}
-
-type benchmarkStruct struct {
- Fooasdfasdfasdfasdf string
-}
-
-type benchmarkNested struct {
- Fooasdfasdfasdfasdf nestedA
-}
-
-type nestedA struct {
- Fooasdfasdfasdfasdf nestedB
-}
-
-type nestedB struct {
- Fooasdfasdfasdfasdf nestedC
-}
-
-type nestedC struct {
- Fooasdfasdfasdfasdf string
-}
-
-type nestedSlice struct {
- A []sliceType
-}
-
-func TestCanSupportEmptyInterface(t *testing.T) {
- assert := assert.New(t)
- data := make(map[string]interface{})
- data["foo"] = "bar"
- result, err := Search("foo", data)
- assert.Nil(err)
- assert.Equal("bar", result)
-}
-
-func TestCanSupportUserDefinedStructsValue(t *testing.T) {
- assert := assert.New(t)
- s := scalars{Foo: "one", Bar: "bar"}
- result, err := Search("Foo", s)
- assert.Nil(err)
- assert.Equal("one", result)
-}
-
-func TestCanSupportUserDefinedStructsRef(t *testing.T) {
- assert := assert.New(t)
- s := scalars{Foo: "one", Bar: "bar"}
- result, err := Search("Foo", &s)
- assert.Nil(err)
- assert.Equal("one", result)
-}
-
-func TestCanSupportStructWithSliceAll(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", B: []scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("B[].Foo", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"f1", "correct"}, result)
-}
-
-func TestCanSupportStructWithSlicingExpression(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", B: []scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("B[:].Foo", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"f1", "correct"}, result)
-}
-
-func TestCanSupportStructWithFilterProjection(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", B: []scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("B[? `true` ].Foo", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"f1", "correct"}, result)
-}
-
-func TestCanSupportStructWithSlice(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", B: []scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("B[-1].Foo", data)
- assert.Nil(err)
- assert.Equal("correct", result)
-}
-
-func TestCanSupportStructWithOrExpressions(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", C: nil}
- result, err := Search("C || A", data)
- assert.Nil(err)
- assert.Equal("foo", result)
-}
-
-func TestCanSupportStructWithSlicePointer(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", C: []*scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("C[-1].Foo", data)
- assert.Nil(err)
- assert.Equal("correct", result)
-}
-
-func TestWillAutomaticallyCapitalizeFieldNames(t *testing.T) {
- assert := assert.New(t)
- s := scalars{Foo: "one", Bar: "bar"}
- // Note that there's a lower cased "foo" instead of "Foo",
- // but it should still correspond to the Foo field in the
- // scalars struct
- result, err := Search("foo", &s)
- assert.Nil(err)
- assert.Equal("one", result)
-}
-
-func TestCanSupportStructWithSliceLowerCased(t *testing.T) {
- assert := assert.New(t)
- data := sliceType{A: "foo", B: []scalars{{"f1", "b1"}, {"correct", "b2"}}}
- result, err := Search("b[-1].foo", data)
- assert.Nil(err)
- assert.Equal("correct", result)
-}
-
-func TestCanSupportStructWithNestedPointers(t *testing.T) {
- assert := assert.New(t)
- data := struct{ A *struct{ B int } }{}
- result, err := Search("A.B", data)
- assert.Nil(err)
- assert.Nil(result)
-}
-
-func TestCanSupportFlattenNestedSlice(t *testing.T) {
- assert := assert.New(t)
- data := nestedSlice{A: []sliceType{
- {B: []scalars{{Foo: "f1a"}, {Foo: "f1b"}}},
- {B: []scalars{{Foo: "f2a"}, {Foo: "f2b"}}},
- }}
- result, err := Search("A[].B[].Foo", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"f1a", "f1b", "f2a", "f2b"}, result)
-}
-
-func TestCanSupportFlattenNestedEmptySlice(t *testing.T) {
- assert := assert.New(t)
- data := nestedSlice{A: []sliceType{
- {}, {B: []scalars{{Foo: "a"}}},
- }}
- result, err := Search("A[].B[].Foo", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"a"}, result)
-}
-
-func TestCanSupportProjectionsWithStructs(t *testing.T) {
- assert := assert.New(t)
- data := nestedSlice{A: []sliceType{
- {A: "first"}, {A: "second"}, {A: "third"},
- }}
- result, err := Search("A[*].A", data)
- assert.Nil(err)
- assert.Equal([]interface{}{"first", "second", "third"}, result)
-}
-
-func TestCanSupportSliceOfStructsWithFunctions(t *testing.T) {
- assert := assert.New(t)
- data := []scalars{scalars{"a1", "b1"}, scalars{"a2", "b2"}}
- result, err := Search("length(@)", data)
- assert.Nil(err)
- assert.Equal(result.(float64), 2.0)
-}
-
-func BenchmarkInterpretSingleFieldStruct(b *testing.B) {
- intr := newInterpreter()
- parser := NewParser()
- ast, _ := parser.Parse("fooasdfasdfasdfasdf")
- data := benchmarkStruct{"foobarbazqux"}
- for i := 0; i < b.N; i++ {
- intr.Execute(ast, &data)
- }
-}
-
-func BenchmarkInterpretNestedStruct(b *testing.B) {
- intr := newInterpreter()
- parser := NewParser()
- ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
- data := benchmarkNested{
- nestedA{
- nestedB{
- nestedC{"foobarbazqux"},
- },
- },
- }
- for i := 0; i < b.N; i++ {
- intr.Execute(ast, &data)
- }
-}
-
-func BenchmarkInterpretNestedMaps(b *testing.B) {
- jsonData := []byte(`{"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": "foobarbazqux"}}}}`)
- var data interface{}
- json.Unmarshal(jsonData, &data)
-
- intr := newInterpreter()
- parser := NewParser()
- ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
- for i := 0; i < b.N; i++ {
- intr.Execute(ast, data)
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer.go
deleted file mode 100644
index 817900c8..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer.go
+++ /dev/null
@@ -1,420 +0,0 @@
-package jmespath
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-type token struct {
- tokenType tokType
- value string
- position int
- length int
-}
-
-type tokType int
-
-const eof = -1
-
-// Lexer contains information about the expression being tokenized.
-type Lexer struct {
- expression string // The expression provided by the user.
- currentPos int // The current position in the string.
- lastWidth int // The width of the current rune. This
- buf bytes.Buffer // Internal buffer used for building up values.
-}
-
-// SyntaxError is the main error used whenever a lexing or parsing error occurs.
-type SyntaxError struct {
- msg string // Error message displayed to user
- Expression string // Expression that generated a SyntaxError
- Offset int // The location in the string where the error occurred
-}
-
-func (e SyntaxError) Error() string {
- // In the future, it would be good to underline the specific
- // location where the error occurred.
- return "SyntaxError: " + e.msg
-}
-
-// HighlightLocation will show where the syntax error occurred.
-// It will place a "^" character on a line below the expression
-// at the point where the syntax error occurred.
-func (e SyntaxError) HighlightLocation() string {
- return e.Expression + "\n" + strings.Repeat(" ", e.Offset) + "^"
-}
-
-//go:generate stringer -type=tokType
-const (
- tUnknown tokType = iota
- tStar
- tDot
- tFilter
- tFlatten
- tLparen
- tRparen
- tLbracket
- tRbracket
- tLbrace
- tRbrace
- tOr
- tPipe
- tNumber
- tUnquotedIdentifier
- tQuotedIdentifier
- tComma
- tColon
- tLT
- tLTE
- tGT
- tGTE
- tEQ
- tNE
- tJSONLiteral
- tStringLiteral
- tCurrent
- tExpref
- tAnd
- tNot
- tEOF
-)
-
-var basicTokens = map[rune]tokType{
- '.': tDot,
- '*': tStar,
- ',': tComma,
- ':': tColon,
- '{': tLbrace,
- '}': tRbrace,
- ']': tRbracket, // tLbracket not included because it could be "[]"
- '(': tLparen,
- ')': tRparen,
- '@': tCurrent,
-}
-
-// Bit mask for [a-zA-Z_] shifted down 64 bits to fit in a single uint64.
-// When using this bitmask just be sure to shift the rune down 64 bits
-// before checking against identifierStartBits.
-const identifierStartBits uint64 = 576460745995190270
-
-// Bit mask for [a-zA-Z0-9], 128 bits -> 2 uint64s.
-var identifierTrailingBits = [2]uint64{287948901175001088, 576460745995190270}
-
-var whiteSpace = map[rune]bool{
- ' ': true, '\t': true, '\n': true, '\r': true,
-}
-
-func (t token) String() string {
- return fmt.Sprintf("Token{%+v, %s, %d, %d}",
- t.tokenType, t.value, t.position, t.length)
-}
-
-// NewLexer creates a new JMESPath lexer.
-func NewLexer() *Lexer {
- lexer := Lexer{}
- return &lexer
-}
-
-func (lexer *Lexer) next() rune {
- if lexer.currentPos >= len(lexer.expression) {
- lexer.lastWidth = 0
- return eof
- }
- r, w := utf8.DecodeRuneInString(lexer.expression[lexer.currentPos:])
- lexer.lastWidth = w
- lexer.currentPos += w
- return r
-}
-
-func (lexer *Lexer) back() {
- lexer.currentPos -= lexer.lastWidth
-}
-
-func (lexer *Lexer) peek() rune {
- t := lexer.next()
- lexer.back()
- return t
-}
-
-// tokenize takes an expression and returns corresponding tokens.
-func (lexer *Lexer) tokenize(expression string) ([]token, error) {
- var tokens []token
- lexer.expression = expression
- lexer.currentPos = 0
- lexer.lastWidth = 0
-loop:
- for {
- r := lexer.next()
- if identifierStartBits&(1<<(uint64(r)-64)) > 0 {
- t := lexer.consumeUnquotedIdentifier()
- tokens = append(tokens, t)
- } else if val, ok := basicTokens[r]; ok {
- // Basic single char token.
- t := token{
- tokenType: val,
- value: string(r),
- position: lexer.currentPos - lexer.lastWidth,
- length: 1,
- }
- tokens = append(tokens, t)
- } else if r == '-' || (r >= '0' && r <= '9') {
- t := lexer.consumeNumber()
- tokens = append(tokens, t)
- } else if r == '[' {
- t := lexer.consumeLBracket()
- tokens = append(tokens, t)
- } else if r == '"' {
- t, err := lexer.consumeQuotedIdentifier()
- if err != nil {
- return tokens, err
- }
- tokens = append(tokens, t)
- } else if r == '\'' {
- t, err := lexer.consumeRawStringLiteral()
- if err != nil {
- return tokens, err
- }
- tokens = append(tokens, t)
- } else if r == '`' {
- t, err := lexer.consumeLiteral()
- if err != nil {
- return tokens, err
- }
- tokens = append(tokens, t)
- } else if r == '|' {
- t := lexer.matchOrElse(r, '|', tOr, tPipe)
- tokens = append(tokens, t)
- } else if r == '<' {
- t := lexer.matchOrElse(r, '=', tLTE, tLT)
- tokens = append(tokens, t)
- } else if r == '>' {
- t := lexer.matchOrElse(r, '=', tGTE, tGT)
- tokens = append(tokens, t)
- } else if r == '!' {
- t := lexer.matchOrElse(r, '=', tNE, tNot)
- tokens = append(tokens, t)
- } else if r == '=' {
- t := lexer.matchOrElse(r, '=', tEQ, tUnknown)
- tokens = append(tokens, t)
- } else if r == '&' {
- t := lexer.matchOrElse(r, '&', tAnd, tExpref)
- tokens = append(tokens, t)
- } else if r == eof {
- break loop
- } else if _, ok := whiteSpace[r]; ok {
- // Ignore whitespace
- } else {
- return tokens, lexer.syntaxError(fmt.Sprintf("Unknown char: %s", strconv.QuoteRuneToASCII(r)))
- }
- }
- tokens = append(tokens, token{tEOF, "", len(lexer.expression), 0})
- return tokens, nil
-}
-
-// Consume characters until the ending rune "r" is reached.
-// If the end of the expression is reached before seeing the
-// terminating rune "r", then an error is returned.
-// If no error occurs then the matching substring is returned.
-// The returned string will not include the ending rune.
-func (lexer *Lexer) consumeUntil(end rune) (string, error) {
- start := lexer.currentPos
- current := lexer.next()
- for current != end && current != eof {
- if current == '\\' && lexer.peek() != eof {
- lexer.next()
- }
- current = lexer.next()
- }
- if lexer.lastWidth == 0 {
- // Then we hit an EOF so we never reached the closing
- // delimiter.
- return "", SyntaxError{
- msg: "Unclosed delimiter: " + string(end),
- Expression: lexer.expression,
- Offset: len(lexer.expression),
- }
- }
- return lexer.expression[start : lexer.currentPos-lexer.lastWidth], nil
-}
-
-func (lexer *Lexer) consumeLiteral() (token, error) {
- start := lexer.currentPos
- value, err := lexer.consumeUntil('`')
- if err != nil {
- return token{}, err
- }
- value = strings.Replace(value, "\\`", "`", -1)
- return token{
- tokenType: tJSONLiteral,
- value: value,
- position: start,
- length: len(value),
- }, nil
-}
-
-func (lexer *Lexer) consumeRawStringLiteral() (token, error) {
- start := lexer.currentPos
- currentIndex := start
- current := lexer.next()
- for current != '\'' && lexer.peek() != eof {
- if current == '\\' && lexer.peek() == '\'' {
- chunk := lexer.expression[currentIndex : lexer.currentPos-1]
- lexer.buf.WriteString(chunk)
- lexer.buf.WriteString("'")
- lexer.next()
- currentIndex = lexer.currentPos
- }
- current = lexer.next()
- }
- if lexer.lastWidth == 0 {
- // Then we hit an EOF so we never reached the closing
- // delimiter.
- return token{}, SyntaxError{
- msg: "Unclosed delimiter: '",
- Expression: lexer.expression,
- Offset: len(lexer.expression),
- }
- }
- if currentIndex < lexer.currentPos {
- lexer.buf.WriteString(lexer.expression[currentIndex : lexer.currentPos-1])
- }
- value := lexer.buf.String()
- // Reset the buffer so it can reused again.
- lexer.buf.Reset()
- return token{
- tokenType: tStringLiteral,
- value: value,
- position: start,
- length: len(value),
- }, nil
-}
-
-func (lexer *Lexer) syntaxError(msg string) SyntaxError {
- return SyntaxError{
- msg: msg,
- Expression: lexer.expression,
- Offset: lexer.currentPos - 1,
- }
-}
-
-// Checks for a two char token, otherwise matches a single character
-// token. This is used whenever a two char token overlaps a single
-// char token, e.g. "||" -> tPipe, "|" -> tOr.
-func (lexer *Lexer) matchOrElse(first rune, second rune, matchedType tokType, singleCharType tokType) token {
- start := lexer.currentPos - lexer.lastWidth
- nextRune := lexer.next()
- var t token
- if nextRune == second {
- t = token{
- tokenType: matchedType,
- value: string(first) + string(second),
- position: start,
- length: 2,
- }
- } else {
- lexer.back()
- t = token{
- tokenType: singleCharType,
- value: string(first),
- position: start,
- length: 1,
- }
- }
- return t
-}
-
-func (lexer *Lexer) consumeLBracket() token {
- // There's three options here:
- // 1. A filter expression "[?"
- // 2. A flatten operator "[]"
- // 3. A bare rbracket "["
- start := lexer.currentPos - lexer.lastWidth
- nextRune := lexer.next()
- var t token
- if nextRune == '?' {
- t = token{
- tokenType: tFilter,
- value: "[?",
- position: start,
- length: 2,
- }
- } else if nextRune == ']' {
- t = token{
- tokenType: tFlatten,
- value: "[]",
- position: start,
- length: 2,
- }
- } else {
- t = token{
- tokenType: tLbracket,
- value: "[",
- position: start,
- length: 1,
- }
- lexer.back()
- }
- return t
-}
-
-func (lexer *Lexer) consumeQuotedIdentifier() (token, error) {
- start := lexer.currentPos
- value, err := lexer.consumeUntil('"')
- if err != nil {
- return token{}, err
- }
- var decoded string
- asJSON := []byte("\"" + value + "\"")
- if err := json.Unmarshal([]byte(asJSON), &decoded); err != nil {
- return token{}, err
- }
- return token{
- tokenType: tQuotedIdentifier,
- value: decoded,
- position: start - 1,
- length: len(decoded),
- }, nil
-}
-
-func (lexer *Lexer) consumeUnquotedIdentifier() token {
- // Consume runes until we reach the end of an unquoted
- // identifier.
- start := lexer.currentPos - lexer.lastWidth
- for {
- r := lexer.next()
- if r < 0 || r > 128 || identifierTrailingBits[uint64(r)/64]&(1<<(uint64(r)%64)) == 0 {
- lexer.back()
- break
- }
- }
- value := lexer.expression[start:lexer.currentPos]
- return token{
- tokenType: tUnquotedIdentifier,
- value: value,
- position: start,
- length: lexer.currentPos - start,
- }
-}
-
-func (lexer *Lexer) consumeNumber() token {
- // Consume runes until we reach something that's not a number.
- start := lexer.currentPos - lexer.lastWidth
- for {
- r := lexer.next()
- if r < '0' || r > '9' {
- lexer.back()
- break
- }
- }
- value := lexer.expression[start:lexer.currentPos]
- return token{
- tokenType: tNumber,
- value: value,
- position: start,
- length: lexer.currentPos - start,
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer_test.go
deleted file mode 100644
index d13a042d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/lexer_test.go
+++ /dev/null
@@ -1,161 +0,0 @@
-package jmespath
-
-import (
- "fmt"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-var lexingTests = []struct {
- expression string
- expected []token
-}{
- {"*", []token{{tStar, "*", 0, 1}}},
- {".", []token{{tDot, ".", 0, 1}}},
- {"[?", []token{{tFilter, "[?", 0, 2}}},
- {"[]", []token{{tFlatten, "[]", 0, 2}}},
- {"(", []token{{tLparen, "(", 0, 1}}},
- {")", []token{{tRparen, ")", 0, 1}}},
- {"[", []token{{tLbracket, "[", 0, 1}}},
- {"]", []token{{tRbracket, "]", 0, 1}}},
- {"{", []token{{tLbrace, "{", 0, 1}}},
- {"}", []token{{tRbrace, "}", 0, 1}}},
- {"||", []token{{tOr, "||", 0, 2}}},
- {"|", []token{{tPipe, "|", 0, 1}}},
- {"29", []token{{tNumber, "29", 0, 2}}},
- {"2", []token{{tNumber, "2", 0, 1}}},
- {"0", []token{{tNumber, "0", 0, 1}}},
- {"-20", []token{{tNumber, "-20", 0, 3}}},
- {"foo", []token{{tUnquotedIdentifier, "foo", 0, 3}}},
- {`"bar"`, []token{{tQuotedIdentifier, "bar", 0, 3}}},
- // Escaping the delimiter
- {`"bar\"baz"`, []token{{tQuotedIdentifier, `bar"baz`, 0, 7}}},
- {",", []token{{tComma, ",", 0, 1}}},
- {":", []token{{tColon, ":", 0, 1}}},
- {"<", []token{{tLT, "<", 0, 1}}},
- {"<=", []token{{tLTE, "<=", 0, 2}}},
- {">", []token{{tGT, ">", 0, 1}}},
- {">=", []token{{tGTE, ">=", 0, 2}}},
- {"==", []token{{tEQ, "==", 0, 2}}},
- {"!=", []token{{tNE, "!=", 0, 2}}},
- {"`[0, 1, 2]`", []token{{tJSONLiteral, "[0, 1, 2]", 1, 9}}},
- {"'foo'", []token{{tStringLiteral, "foo", 1, 3}}},
- {"'a'", []token{{tStringLiteral, "a", 1, 1}}},
- {`'foo\'bar'`, []token{{tStringLiteral, "foo'bar", 1, 7}}},
- {"@", []token{{tCurrent, "@", 0, 1}}},
- {"&", []token{{tExpref, "&", 0, 1}}},
- // Quoted identifier unicode escape sequences
- {`"\u2713"`, []token{{tQuotedIdentifier, "✓", 0, 3}}},
- {`"\\"`, []token{{tQuotedIdentifier, `\`, 0, 1}}},
- {"`\"foo\"`", []token{{tJSONLiteral, "\"foo\"", 1, 5}}},
- // Combinations of tokens.
- {"foo.bar", []token{
- {tUnquotedIdentifier, "foo", 0, 3},
- {tDot, ".", 3, 1},
- {tUnquotedIdentifier, "bar", 4, 3},
- }},
- {"foo[0]", []token{
- {tUnquotedIdentifier, "foo", 0, 3},
- {tLbracket, "[", 3, 1},
- {tNumber, "0", 4, 1},
- {tRbracket, "]", 5, 1},
- }},
- {"foo[?a<b]", []token{
- {tUnquotedIdentifier, "foo", 0, 3},
- {tFilter, "[?", 3, 2},
- {tUnquotedIdentifier, "a", 5, 1},
- {tLT, "<", 6, 1},
- {tUnquotedIdentifier, "b", 7, 1},
- {tRbracket, "]", 8, 1},
- }},
-}
-
-func TestCanLexTokens(t *testing.T) {
- assert := assert.New(t)
- lexer := NewLexer()
- for _, tt := range lexingTests {
- tokens, err := lexer.tokenize(tt.expression)
- if assert.Nil(err) {
- errMsg := fmt.Sprintf("Mismatch expected number of tokens: (expected: %s, actual: %s)",
- tt.expected, tokens)
- tt.expected = append(tt.expected, token{tEOF, "", len(tt.expression), 0})
- if assert.Equal(len(tt.expected), len(tokens), errMsg) {
- for i, token := range tokens {
- expected := tt.expected[i]
- assert.Equal(expected, token, "Token not equal")
- }
- }
- }
- }
-}
-
-var lexingErrorTests = []struct {
- expression string
- msg string
-}{
- {"'foo", "Missing closing single quote"},
- {"[?foo==bar?]", "Unknown char '?'"},
-}
-
-func TestLexingErrors(t *testing.T) {
- assert := assert.New(t)
- lexer := NewLexer()
- for _, tt := range lexingErrorTests {
- _, err := lexer.tokenize(tt.expression)
- assert.NotNil(err, fmt.Sprintf("Expected lexing error: %s", tt.msg))
- }
-}
-
-var exprIdentifier = "abcdefghijklmnopqrstuvwxyz"
-var exprSubexpr = "abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz"
-var deeplyNested50 = "j49.j48.j47.j46.j45.j44.j43.j42.j41.j40.j39.j38.j37.j36.j35.j34.j33.j32.j31.j30.j29.j28.j27.j26.j25.j24.j23.j22.j21.j20.j19.j18.j17.j16.j15.j14.j13.j12.j11.j10.j9.j8.j7.j6.j5.j4.j3.j2.j1.j0"
-var deeplyNested50Pipe = "j49|j48|j47|j46|j45|j44|j43|j42|j41|j40|j39|j38|j37|j36|j35|j34|j33|j32|j31|j30|j29|j28|j27|j26|j25|j24|j23|j22|j21|j20|j19|j18|j17|j16|j15|j14|j13|j12|j11|j10|j9|j8|j7|j6|j5|j4|j3|j2|j1|j0"
-var deeplyNested50Index = "[49][48][47][46][45][44][43][42][41][40][39][38][37][36][35][34][33][32][31][30][29][28][27][26][25][24][23][22][21][20][19][18][17][16][15][14][13][12][11][10][9][8][7][6][5][4][3][2][1][0]"
-var deepProjection104 = "a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*].a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*].a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*].a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*]"
-var exprQuotedIdentifier = `"abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz"`
-var quotedIdentifierEscapes = `"\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t"`
-var rawStringLiteral = `'abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz'`
-
-func BenchmarkLexIdentifier(b *testing.B) {
- runLexBenchmark(b, exprIdentifier)
-}
-
-func BenchmarkLexSubexpression(b *testing.B) {
- runLexBenchmark(b, exprSubexpr)
-}
-
-func BenchmarkLexDeeplyNested50(b *testing.B) {
- runLexBenchmark(b, deeplyNested50)
-}
-
-func BenchmarkLexDeepNested50Pipe(b *testing.B) {
- runLexBenchmark(b, deeplyNested50Pipe)
-}
-
-func BenchmarkLexDeepNested50Index(b *testing.B) {
- runLexBenchmark(b, deeplyNested50Index)
-}
-
-func BenchmarkLexQuotedIdentifier(b *testing.B) {
- runLexBenchmark(b, exprQuotedIdentifier)
-}
-
-func BenchmarkLexQuotedIdentifierEscapes(b *testing.B) {
- runLexBenchmark(b, quotedIdentifierEscapes)
-}
-
-func BenchmarkLexRawStringLiteral(b *testing.B) {
- runLexBenchmark(b, rawStringLiteral)
-}
-
-func BenchmarkLexDeepProjection104(b *testing.B) {
- runLexBenchmark(b, deepProjection104)
-}
-
-func runLexBenchmark(b *testing.B, expression string) {
- lexer := NewLexer()
- for i := 0; i < b.N; i++ {
- lexer.tokenize(expression)
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser.go
deleted file mode 100644
index 1240a175..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser.go
+++ /dev/null
@@ -1,603 +0,0 @@
-package jmespath
-
-import (
- "encoding/json"
- "fmt"
- "strconv"
- "strings"
-)
-
-type astNodeType int
-
-//go:generate stringer -type astNodeType
-const (
- ASTEmpty astNodeType = iota
- ASTComparator
- ASTCurrentNode
- ASTExpRef
- ASTFunctionExpression
- ASTField
- ASTFilterProjection
- ASTFlatten
- ASTIdentity
- ASTIndex
- ASTIndexExpression
- ASTKeyValPair
- ASTLiteral
- ASTMultiSelectHash
- ASTMultiSelectList
- ASTOrExpression
- ASTAndExpression
- ASTNotExpression
- ASTPipe
- ASTProjection
- ASTSubexpression
- ASTSlice
- ASTValueProjection
-)
-
-// ASTNode represents the abstract syntax tree of a JMESPath expression.
-type ASTNode struct {
- nodeType astNodeType
- value interface{}
- children []ASTNode
-}
-
-func (node ASTNode) String() string {
- return node.PrettyPrint(0)
-}
-
-// PrettyPrint will pretty print the parsed AST.
-// The AST is an implementation detail and this pretty print
-// function is provided as a convenience method to help with
-// debugging. You should not rely on its output as the internal
-// structure of the AST may change at any time.
-func (node ASTNode) PrettyPrint(indent int) string {
- spaces := strings.Repeat(" ", indent)
- output := fmt.Sprintf("%s%s {\n", spaces, node.nodeType)
- nextIndent := indent + 2
- if node.value != nil {
- if converted, ok := node.value.(fmt.Stringer); ok {
- // Account for things like comparator nodes
- // that are enums with a String() method.
- output += fmt.Sprintf("%svalue: %s\n", strings.Repeat(" ", nextIndent), converted.String())
- } else {
- output += fmt.Sprintf("%svalue: %#v\n", strings.Repeat(" ", nextIndent), node.value)
- }
- }
- lastIndex := len(node.children)
- if lastIndex > 0 {
- output += fmt.Sprintf("%schildren: {\n", strings.Repeat(" ", nextIndent))
- childIndent := nextIndent + 2
- for _, elem := range node.children {
- output += elem.PrettyPrint(childIndent)
- }
- }
- output += fmt.Sprintf("%s}\n", spaces)
- return output
-}
-
-var bindingPowers = map[tokType]int{
- tEOF: 0,
- tUnquotedIdentifier: 0,
- tQuotedIdentifier: 0,
- tRbracket: 0,
- tRparen: 0,
- tComma: 0,
- tRbrace: 0,
- tNumber: 0,
- tCurrent: 0,
- tExpref: 0,
- tColon: 0,
- tPipe: 1,
- tOr: 2,
- tAnd: 3,
- tEQ: 5,
- tLT: 5,
- tLTE: 5,
- tGT: 5,
- tGTE: 5,
- tNE: 5,
- tFlatten: 9,
- tStar: 20,
- tFilter: 21,
- tDot: 40,
- tNot: 45,
- tLbrace: 50,
- tLbracket: 55,
- tLparen: 60,
-}
-
-// Parser holds state about the current expression being parsed.
-type Parser struct {
- expression string
- tokens []token
- index int
-}
-
-// NewParser creates a new JMESPath parser.
-func NewParser() *Parser {
- p := Parser{}
- return &p
-}
-
-// Parse will compile a JMESPath expression.
-func (p *Parser) Parse(expression string) (ASTNode, error) {
- lexer := NewLexer()
- p.expression = expression
- p.index = 0
- tokens, err := lexer.tokenize(expression)
- if err != nil {
- return ASTNode{}, err
- }
- p.tokens = tokens
- parsed, err := p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- if p.current() != tEOF {
- return ASTNode{}, p.syntaxError(fmt.Sprintf(
- "Unexpected token at the end of the expresssion: %s", p.current()))
- }
- return parsed, nil
-}
-
-func (p *Parser) parseExpression(bindingPower int) (ASTNode, error) {
- var err error
- leftToken := p.lookaheadToken(0)
- p.advance()
- leftNode, err := p.nud(leftToken)
- if err != nil {
- return ASTNode{}, err
- }
- currentToken := p.current()
- for bindingPower < bindingPowers[currentToken] {
- p.advance()
- leftNode, err = p.led(currentToken, leftNode)
- if err != nil {
- return ASTNode{}, err
- }
- currentToken = p.current()
- }
- return leftNode, nil
-}
-
-func (p *Parser) parseIndexExpression() (ASTNode, error) {
- if p.lookahead(0) == tColon || p.lookahead(1) == tColon {
- return p.parseSliceExpression()
- }
- indexStr := p.lookaheadToken(0).value
- parsedInt, err := strconv.Atoi(indexStr)
- if err != nil {
- return ASTNode{}, err
- }
- indexNode := ASTNode{nodeType: ASTIndex, value: parsedInt}
- p.advance()
- if err := p.match(tRbracket); err != nil {
- return ASTNode{}, err
- }
- return indexNode, nil
-}
-
-func (p *Parser) parseSliceExpression() (ASTNode, error) {
- parts := []*int{nil, nil, nil}
- index := 0
- current := p.current()
- for current != tRbracket && index < 3 {
- if current == tColon {
- index++
- p.advance()
- } else if current == tNumber {
- parsedInt, err := strconv.Atoi(p.lookaheadToken(0).value)
- if err != nil {
- return ASTNode{}, err
- }
- parts[index] = &parsedInt
- p.advance()
- } else {
- return ASTNode{}, p.syntaxError(
- "Expected tColon or tNumber" + ", received: " + p.current().String())
- }
- current = p.current()
- }
- if err := p.match(tRbracket); err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTSlice,
- value: parts,
- }, nil
-}
-
-func (p *Parser) match(tokenType tokType) error {
- if p.current() == tokenType {
- p.advance()
- return nil
- }
- return p.syntaxError("Expected " + tokenType.String() + ", received: " + p.current().String())
-}
-
-func (p *Parser) led(tokenType tokType, node ASTNode) (ASTNode, error) {
- switch tokenType {
- case tDot:
- if p.current() != tStar {
- right, err := p.parseDotRHS(bindingPowers[tDot])
- return ASTNode{
- nodeType: ASTSubexpression,
- children: []ASTNode{node, right},
- }, err
- }
- p.advance()
- right, err := p.parseProjectionRHS(bindingPowers[tDot])
- return ASTNode{
- nodeType: ASTValueProjection,
- children: []ASTNode{node, right},
- }, err
- case tPipe:
- right, err := p.parseExpression(bindingPowers[tPipe])
- return ASTNode{nodeType: ASTPipe, children: []ASTNode{node, right}}, err
- case tOr:
- right, err := p.parseExpression(bindingPowers[tOr])
- return ASTNode{nodeType: ASTOrExpression, children: []ASTNode{node, right}}, err
- case tAnd:
- right, err := p.parseExpression(bindingPowers[tAnd])
- return ASTNode{nodeType: ASTAndExpression, children: []ASTNode{node, right}}, err
- case tLparen:
- name := node.value
- var args []ASTNode
- for p.current() != tRparen {
- expression, err := p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- if p.current() == tComma {
- if err := p.match(tComma); err != nil {
- return ASTNode{}, err
- }
- }
- args = append(args, expression)
- }
- if err := p.match(tRparen); err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTFunctionExpression,
- value: name,
- children: args,
- }, nil
- case tFilter:
- return p.parseFilter(node)
- case tFlatten:
- left := ASTNode{nodeType: ASTFlatten, children: []ASTNode{node}}
- right, err := p.parseProjectionRHS(bindingPowers[tFlatten])
- return ASTNode{
- nodeType: ASTProjection,
- children: []ASTNode{left, right},
- }, err
- case tEQ, tNE, tGT, tGTE, tLT, tLTE:
- right, err := p.parseExpression(bindingPowers[tokenType])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTComparator,
- value: tokenType,
- children: []ASTNode{node, right},
- }, nil
- case tLbracket:
- tokenType := p.current()
- var right ASTNode
- var err error
- if tokenType == tNumber || tokenType == tColon {
- right, err = p.parseIndexExpression()
- if err != nil {
- return ASTNode{}, err
- }
- return p.projectIfSlice(node, right)
- }
- // Otherwise this is a projection.
- if err := p.match(tStar); err != nil {
- return ASTNode{}, err
- }
- if err := p.match(tRbracket); err != nil {
- return ASTNode{}, err
- }
- right, err = p.parseProjectionRHS(bindingPowers[tStar])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTProjection,
- children: []ASTNode{node, right},
- }, nil
- }
- return ASTNode{}, p.syntaxError("Unexpected token: " + tokenType.String())
-}
-
-func (p *Parser) nud(token token) (ASTNode, error) {
- switch token.tokenType {
- case tJSONLiteral:
- var parsed interface{}
- err := json.Unmarshal([]byte(token.value), &parsed)
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{nodeType: ASTLiteral, value: parsed}, nil
- case tStringLiteral:
- return ASTNode{nodeType: ASTLiteral, value: token.value}, nil
- case tUnquotedIdentifier:
- return ASTNode{
- nodeType: ASTField,
- value: token.value,
- }, nil
- case tQuotedIdentifier:
- node := ASTNode{nodeType: ASTField, value: token.value}
- if p.current() == tLparen {
- return ASTNode{}, p.syntaxErrorToken("Can't have quoted identifier as function name.", token)
- }
- return node, nil
- case tStar:
- left := ASTNode{nodeType: ASTIdentity}
- var right ASTNode
- var err error
- if p.current() == tRbracket {
- right = ASTNode{nodeType: ASTIdentity}
- } else {
- right, err = p.parseProjectionRHS(bindingPowers[tStar])
- }
- return ASTNode{nodeType: ASTValueProjection, children: []ASTNode{left, right}}, err
- case tFilter:
- return p.parseFilter(ASTNode{nodeType: ASTIdentity})
- case tLbrace:
- return p.parseMultiSelectHash()
- case tFlatten:
- left := ASTNode{
- nodeType: ASTFlatten,
- children: []ASTNode{{nodeType: ASTIdentity}},
- }
- right, err := p.parseProjectionRHS(bindingPowers[tFlatten])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{nodeType: ASTProjection, children: []ASTNode{left, right}}, nil
- case tLbracket:
- tokenType := p.current()
- //var right ASTNode
- if tokenType == tNumber || tokenType == tColon {
- right, err := p.parseIndexExpression()
- if err != nil {
- return ASTNode{}, nil
- }
- return p.projectIfSlice(ASTNode{nodeType: ASTIdentity}, right)
- } else if tokenType == tStar && p.lookahead(1) == tRbracket {
- p.advance()
- p.advance()
- right, err := p.parseProjectionRHS(bindingPowers[tStar])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTProjection,
- children: []ASTNode{{nodeType: ASTIdentity}, right},
- }, nil
- } else {
- return p.parseMultiSelectList()
- }
- case tCurrent:
- return ASTNode{nodeType: ASTCurrentNode}, nil
- case tExpref:
- expression, err := p.parseExpression(bindingPowers[tExpref])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{nodeType: ASTExpRef, children: []ASTNode{expression}}, nil
- case tNot:
- expression, err := p.parseExpression(bindingPowers[tNot])
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{nodeType: ASTNotExpression, children: []ASTNode{expression}}, nil
- case tLparen:
- expression, err := p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- if err := p.match(tRparen); err != nil {
- return ASTNode{}, err
- }
- return expression, nil
- case tEOF:
- return ASTNode{}, p.syntaxErrorToken("Incomplete expression", token)
- }
-
- return ASTNode{}, p.syntaxErrorToken("Invalid token: "+token.tokenType.String(), token)
-}
-
-func (p *Parser) parseMultiSelectList() (ASTNode, error) {
- var expressions []ASTNode
- for {
- expression, err := p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- expressions = append(expressions, expression)
- if p.current() == tRbracket {
- break
- }
- err = p.match(tComma)
- if err != nil {
- return ASTNode{}, err
- }
- }
- err := p.match(tRbracket)
- if err != nil {
- return ASTNode{}, err
- }
- return ASTNode{
- nodeType: ASTMultiSelectList,
- children: expressions,
- }, nil
-}
-
-func (p *Parser) parseMultiSelectHash() (ASTNode, error) {
- var children []ASTNode
- for {
- keyToken := p.lookaheadToken(0)
- if err := p.match(tUnquotedIdentifier); err != nil {
- if err := p.match(tQuotedIdentifier); err != nil {
- return ASTNode{}, p.syntaxError("Expected tQuotedIdentifier or tUnquotedIdentifier")
- }
- }
- keyName := keyToken.value
- err := p.match(tColon)
- if err != nil {
- return ASTNode{}, err
- }
- value, err := p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- node := ASTNode{
- nodeType: ASTKeyValPair,
- value: keyName,
- children: []ASTNode{value},
- }
- children = append(children, node)
- if p.current() == tComma {
- err := p.match(tComma)
- if err != nil {
- return ASTNode{}, nil
- }
- } else if p.current() == tRbrace {
- err := p.match(tRbrace)
- if err != nil {
- return ASTNode{}, nil
- }
- break
- }
- }
- return ASTNode{
- nodeType: ASTMultiSelectHash,
- children: children,
- }, nil
-}
-
-func (p *Parser) projectIfSlice(left ASTNode, right ASTNode) (ASTNode, error) {
- indexExpr := ASTNode{
- nodeType: ASTIndexExpression,
- children: []ASTNode{left, right},
- }
- if right.nodeType == ASTSlice {
- right, err := p.parseProjectionRHS(bindingPowers[tStar])
- return ASTNode{
- nodeType: ASTProjection,
- children: []ASTNode{indexExpr, right},
- }, err
- }
- return indexExpr, nil
-}
-func (p *Parser) parseFilter(node ASTNode) (ASTNode, error) {
- var right, condition ASTNode
- var err error
- condition, err = p.parseExpression(0)
- if err != nil {
- return ASTNode{}, err
- }
- if err := p.match(tRbracket); err != nil {
- return ASTNode{}, err
- }
- if p.current() == tFlatten {
- right = ASTNode{nodeType: ASTIdentity}
- } else {
- right, err = p.parseProjectionRHS(bindingPowers[tFilter])
- if err != nil {
- return ASTNode{}, err
- }
- }
-
- return ASTNode{
- nodeType: ASTFilterProjection,
- children: []ASTNode{node, right, condition},
- }, nil
-}
-
-func (p *Parser) parseDotRHS(bindingPower int) (ASTNode, error) {
- lookahead := p.current()
- if tokensOneOf([]tokType{tQuotedIdentifier, tUnquotedIdentifier, tStar}, lookahead) {
- return p.parseExpression(bindingPower)
- } else if lookahead == tLbracket {
- if err := p.match(tLbracket); err != nil {
- return ASTNode{}, err
- }
- return p.parseMultiSelectList()
- } else if lookahead == tLbrace {
- if err := p.match(tLbrace); err != nil {
- return ASTNode{}, err
- }
- return p.parseMultiSelectHash()
- }
- return ASTNode{}, p.syntaxError("Expected identifier, lbracket, or lbrace")
-}
-
-func (p *Parser) parseProjectionRHS(bindingPower int) (ASTNode, error) {
- current := p.current()
- if bindingPowers[current] < 10 {
- return ASTNode{nodeType: ASTIdentity}, nil
- } else if current == tLbracket {
- return p.parseExpression(bindingPower)
- } else if current == tFilter {
- return p.parseExpression(bindingPower)
- } else if current == tDot {
- err := p.match(tDot)
- if err != nil {
- return ASTNode{}, err
- }
- return p.parseDotRHS(bindingPower)
- } else {
- return ASTNode{}, p.syntaxError("Error")
- }
-}
-
-func (p *Parser) lookahead(number int) tokType {
- return p.lookaheadToken(number).tokenType
-}
-
-func (p *Parser) current() tokType {
- return p.lookahead(0)
-}
-
-func (p *Parser) lookaheadToken(number int) token {
- return p.tokens[p.index+number]
-}
-
-func (p *Parser) advance() {
- p.index++
-}
-
-func tokensOneOf(elements []tokType, token tokType) bool {
- for _, elem := range elements {
- if elem == token {
- return true
- }
- }
- return false
-}
-
-func (p *Parser) syntaxError(msg string) SyntaxError {
- return SyntaxError{
- msg: msg,
- Expression: p.expression,
- Offset: p.lookaheadToken(0).position,
- }
-}
-
-// Create a SyntaxError based on the provided token.
-// This differs from syntaxError() which creates a SyntaxError
-// based on the current lookahead token.
-func (p *Parser) syntaxErrorToken(msg string, t token) SyntaxError {
- return SyntaxError{
- msg: msg,
- Expression: p.expression,
- Offset: t.position,
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser_test.go
deleted file mode 100644
index 997a0f4d..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/parser_test.go
+++ /dev/null
@@ -1,136 +0,0 @@
-package jmespath
-
-import (
- "fmt"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-var parsingErrorTests = []struct {
- expression string
- msg string
-}{
- {"foo.", "Incopmlete expression"},
- {"[foo", "Incopmlete expression"},
- {"]", "Invalid"},
- {")", "Invalid"},
- {"}", "Invalid"},
- {"foo..bar", "Invalid"},
- {`foo."bar`, "Forwards lexer errors"},
- {`{foo: bar`, "Incomplete expression"},
- {`{foo bar}`, "Invalid"},
- {`[foo bar]`, "Invalid"},
- {`foo@`, "Invalid"},
- {`&&&&&&&&&&&&t(`, "Invalid"},
- {`[*][`, "Invalid"},
-}
-
-func TestParsingErrors(t *testing.T) {
- assert := assert.New(t)
- parser := NewParser()
- for _, tt := range parsingErrorTests {
- _, err := parser.Parse(tt.expression)
- assert.NotNil(err, fmt.Sprintf("Expected parsing error: %s, for expression: %s", tt.msg, tt.expression))
- }
-}
-
-var prettyPrinted = `ASTProjection {
- children: {
- ASTField {
- value: "foo"
- }
- ASTSubexpression {
- children: {
- ASTSubexpression {
- children: {
- ASTField {
- value: "bar"
- }
- ASTField {
- value: "baz"
- }
- }
- ASTField {
- value: "qux"
- }
- }
-}
-`
-
-var prettyPrintedCompNode = `ASTFilterProjection {
- children: {
- ASTField {
- value: "a"
- }
- ASTIdentity {
- }
- ASTComparator {
- value: tLTE
- children: {
- ASTField {
- value: "b"
- }
- ASTField {
- value: "c"
- }
- }
-}
-`
-
-func TestPrettyPrintedAST(t *testing.T) {
- assert := assert.New(t)
- parser := NewParser()
- parsed, _ := parser.Parse("foo[*].bar.baz.qux")
- assert.Equal(parsed.PrettyPrint(0), prettyPrinted)
-}
-
-func TestPrettyPrintedCompNode(t *testing.T) {
- assert := assert.New(t)
- parser := NewParser()
- parsed, _ := parser.Parse("a[?b<=c]")
- assert.Equal(parsed.PrettyPrint(0), prettyPrintedCompNode)
-}
-
-func BenchmarkParseIdentifier(b *testing.B) {
- runParseBenchmark(b, exprIdentifier)
-}
-
-func BenchmarkParseSubexpression(b *testing.B) {
- runParseBenchmark(b, exprSubexpr)
-}
-
-func BenchmarkParseDeeplyNested50(b *testing.B) {
- runParseBenchmark(b, deeplyNested50)
-}
-
-func BenchmarkParseDeepNested50Pipe(b *testing.B) {
- runParseBenchmark(b, deeplyNested50Pipe)
-}
-
-func BenchmarkParseDeepNested50Index(b *testing.B) {
- runParseBenchmark(b, deeplyNested50Index)
-}
-
-func BenchmarkParseQuotedIdentifier(b *testing.B) {
- runParseBenchmark(b, exprQuotedIdentifier)
-}
-
-func BenchmarkParseQuotedIdentifierEscapes(b *testing.B) {
- runParseBenchmark(b, quotedIdentifierEscapes)
-}
-
-func BenchmarkParseRawStringLiteral(b *testing.B) {
- runParseBenchmark(b, rawStringLiteral)
-}
-
-func BenchmarkParseDeepProjection104(b *testing.B) {
- runParseBenchmark(b, deepProjection104)
-}
-
-func runParseBenchmark(b *testing.B, expression string) {
- parser := NewParser()
- for i := 0; i < b.N; i++ {
- parser.Parse(expression)
- }
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/toktype_string.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/toktype_string.go
deleted file mode 100644
index dae79cbd..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/toktype_string.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// generated by stringer -type=tokType; DO NOT EDIT
-
-package jmespath
-
-import "fmt"
-
-const _tokType_name = "tUnknowntStartDottFiltertFlattentLparentRparentLbrackettRbrackettLbracetRbracetOrtPipetNumbertUnquotedIdentifiertQuotedIdentifiertCommatColontLTtLTEtGTtGTEtEQtNEtJSONLiteraltStringLiteraltCurrenttExpreftAndtNottEOF"
-
-var _tokType_index = [...]uint8{0, 8, 13, 17, 24, 32, 39, 46, 55, 64, 71, 78, 81, 86, 93, 112, 129, 135, 141, 144, 148, 151, 155, 158, 161, 173, 187, 195, 202, 206, 210, 214}
-
-func (i tokType) String() string {
- if i < 0 || i >= tokType(len(_tokType_index)-1) {
- return fmt.Sprintf("tokType(%d)", i)
- }
- return _tokType_name[_tokType_index[i]:_tokType_index[i+1]]
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util.go
deleted file mode 100644
index ddc1b7d7..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util.go
+++ /dev/null
@@ -1,185 +0,0 @@
-package jmespath
-
-import (
- "errors"
- "reflect"
-)
-
-// IsFalse determines if an object is false based on the JMESPath spec.
-// JMESPath defines false values to be any of:
-// - An empty string array, or hash.
-// - The boolean value false.
-// - nil
-func isFalse(value interface{}) bool {
- switch v := value.(type) {
- case bool:
- return !v
- case []interface{}:
- return len(v) == 0
- case map[string]interface{}:
- return len(v) == 0
- case string:
- return len(v) == 0
- case nil:
- return true
- }
- // Try the reflection cases before returning false.
- rv := reflect.ValueOf(value)
- switch rv.Kind() {
- case reflect.Struct:
- // A struct type will never be false, even if
- // all of its values are the zero type.
- return false
- case reflect.Slice, reflect.Map:
- return rv.Len() == 0
- case reflect.Ptr:
- if rv.IsNil() {
- return true
- }
- // If it's a pointer type, we'll try to deref the pointer
- // and evaluate the pointer value for isFalse.
- element := rv.Elem()
- return isFalse(element.Interface())
- }
- return false
-}
-
-// ObjsEqual is a generic object equality check.
-// It will take two arbitrary objects and recursively determine
-// if they are equal.
-func objsEqual(left interface{}, right interface{}) bool {
- return reflect.DeepEqual(left, right)
-}
-
-// SliceParam refers to a single part of a slice.
-// A slice consists of a start, a stop, and a step, similar to
-// python slices.
-type sliceParam struct {
- N int
- Specified bool
-}
-
-// Slice supports [start:stop:step] style slicing that's supported in JMESPath.
-func slice(slice []interface{}, parts []sliceParam) ([]interface{}, error) {
- computed, err := computeSliceParams(len(slice), parts)
- if err != nil {
- return nil, err
- }
- start, stop, step := computed[0], computed[1], computed[2]
- result := []interface{}{}
- if step > 0 {
- for i := start; i < stop; i += step {
- result = append(result, slice[i])
- }
- } else {
- for i := start; i > stop; i += step {
- result = append(result, slice[i])
- }
- }
- return result, nil
-}
-
-func computeSliceParams(length int, parts []sliceParam) ([]int, error) {
- var start, stop, step int
- if !parts[2].Specified {
- step = 1
- } else if parts[2].N == 0 {
- return nil, errors.New("Invalid slice, step cannot be 0")
- } else {
- step = parts[2].N
- }
- var stepValueNegative bool
- if step < 0 {
- stepValueNegative = true
- } else {
- stepValueNegative = false
- }
-
- if !parts[0].Specified {
- if stepValueNegative {
- start = length - 1
- } else {
- start = 0
- }
- } else {
- start = capSlice(length, parts[0].N, step)
- }
-
- if !parts[1].Specified {
- if stepValueNegative {
- stop = -1
- } else {
- stop = length
- }
- } else {
- stop = capSlice(length, parts[1].N, step)
- }
- return []int{start, stop, step}, nil
-}
-
-func capSlice(length int, actual int, step int) int {
- if actual < 0 {
- actual += length
- if actual < 0 {
- if step < 0 {
- actual = -1
- } else {
- actual = 0
- }
- }
- } else if actual >= length {
- if step < 0 {
- actual = length - 1
- } else {
- actual = length
- }
- }
- return actual
-}
-
-// ToArrayNum converts an empty interface type to a slice of float64.
-// If any element in the array cannot be converted, then nil is returned
-// along with a second value of false.
-func toArrayNum(data interface{}) ([]float64, bool) {
- // Is there a better way to do this with reflect?
- if d, ok := data.([]interface{}); ok {
- result := make([]float64, len(d))
- for i, el := range d {
- item, ok := el.(float64)
- if !ok {
- return nil, false
- }
- result[i] = item
- }
- return result, true
- }
- return nil, false
-}
-
-// ToArrayStr converts an empty interface type to a slice of strings.
-// If any element in the array cannot be converted, then nil is returned
-// along with a second value of false. If the input data could be entirely
-// converted, then the converted data, along with a second value of true,
-// will be returned.
-func toArrayStr(data interface{}) ([]string, bool) {
- // Is there a better way to do this with reflect?
- if d, ok := data.([]interface{}); ok {
- result := make([]string, len(d))
- for i, el := range d {
- item, ok := el.(string)
- if !ok {
- return nil, false
- }
- result[i] = item
- }
- return result, true
- }
- return nil, false
-}
-
-func isSliceType(v interface{}) bool {
- if v == nil {
- return false
- }
- return reflect.TypeOf(v).Kind() == reflect.Slice
-}
diff --git a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util_test.go b/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util_test.go
deleted file mode 100644
index 1754b5d3..00000000
--- a/vendor/github.com/aws/aws-sdk-go/vendor/github.com/jmespath/go-jmespath/util_test.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package jmespath
-
-import (
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-func TestSlicePositiveStep(t *testing.T) {
- assert := assert.New(t)
- input := make([]interface{}, 5)
- input[0] = 0
- input[1] = 1
- input[2] = 2
- input[3] = 3
- input[4] = 4
- result, err := slice(input, []sliceParam{{0, true}, {3, true}, {1, true}})
- assert.Nil(err)
- assert.Equal(input[:3], result)
-}
-
-func TestIsFalseJSONTypes(t *testing.T) {
- assert := assert.New(t)
- assert.True(isFalse(false))
- assert.True(isFalse(""))
- var empty []interface{}
- assert.True(isFalse(empty))
- m := make(map[string]interface{})
- assert.True(isFalse(m))
- assert.True(isFalse(nil))
-
-}
-
-func TestIsFalseWithUserDefinedStructs(t *testing.T) {
- assert := assert.New(t)
- type nilStructType struct {
- SliceOfPointers []*string
- }
- nilStruct := nilStructType{SliceOfPointers: nil}
- assert.True(isFalse(nilStruct.SliceOfPointers))
-
- // A user defined struct will never be false though,
- // even if it's fields are the zero type.
- assert.False(isFalse(nilStruct))
-}
-
-func TestIsFalseWithNilInterface(t *testing.T) {
- assert := assert.New(t)
- var a *int = nil
- var nilInterface interface{}
- nilInterface = a
- assert.True(isFalse(nilInterface))
-}
-
-func TestIsFalseWithMapOfUserStructs(t *testing.T) {
- assert := assert.New(t)
- type foo struct {
- Bar string
- Baz string
- }
- m := make(map[int]foo)
- assert.True(isFalse(m))
-}
-
-func TestObjsEqual(t *testing.T) {
- assert := assert.New(t)
- assert.True(objsEqual("foo", "foo"))
- assert.True(objsEqual(20, 20))
- assert.True(objsEqual([]int{1, 2, 3}, []int{1, 2, 3}))
- assert.True(objsEqual(nil, nil))
- assert.True(!objsEqual(nil, "foo"))
- assert.True(objsEqual([]int{}, []int{}))
- assert.True(!objsEqual([]int{}, nil))
-}