summaryrefslogtreecommitdiff
path: root/data
diff options
context:
space:
mode:
authorDave Henderson <dhenderson@gmail.com>2017-08-05 13:21:05 -0400
committerDave Henderson <dhenderson@gmail.com>2017-08-09 21:51:07 -0400
commit51ddb6e800ab087fa3dff19686b0f1f39a1a4432 (patch)
tree1892e841efa720c2cc387cd0de7f9c1b6d318c63 /data
parentdd5a7e412352f2e268973b428648cca6e549dc83 (diff)
Extracting data namespace, renaming typeconv to conv namespace
Signed-off-by: Dave Henderson <dhenderson@gmail.com>
Diffstat (limited to 'data')
-rw-r--r--data/data.go250
-rw-r--r--data/data_test.go319
-rw-r--r--data/datasource.go452
-rw-r--r--data/datasource_test.go300
4 files changed, 1321 insertions, 0 deletions
diff --git a/data/data.go b/data/data.go
new file mode 100644
index 00000000..10102eaa
--- /dev/null
+++ b/data/data.go
@@ -0,0 +1,250 @@
+package data
+
+import (
+ "bytes"
+ "encoding/csv"
+ "encoding/json"
+ "log"
+ "strings"
+
+ // XXX: replace once https://github.com/BurntSushi/toml/pull/179 is merged
+ "github.com/hairyhenderson/toml"
+ "github.com/ugorji/go/codec"
+ yaml "gopkg.in/yaml.v2"
+)
+
+// unmarshalObj - unmarshal the document `in` into the given map using the
+// supplied unmarshal function f (yaml.Unmarshal, toml.Unmarshal, ...).
+// Note: on parse failure this exits the process via log.Fatalf instead of
+// returning the error, so callers never see a partial result.
+func unmarshalObj(obj map[string]interface{}, in string, f func([]byte, interface{}) error) map[string]interface{} {
+ err := f([]byte(in), &obj)
+ if err != nil {
+ log.Fatalf("Unable to unmarshal object %s: %v", in, err)
+ }
+ return obj
+}
+
+// unmarshalArray - same as unmarshalObj, but for array (slice) documents.
+func unmarshalArray(obj []interface{}, in string, f func([]byte, interface{}) error) []interface{} {
+ err := f([]byte(in), &obj)
+ if err != nil {
+ log.Fatalf("Unable to unmarshal array %s: %v", in, err)
+ }
+ return obj
+}
+
+// JSON - Unmarshal a JSON Object
+// (parsed with the YAML unmarshaler - presumably because YAML is treated as
+// a superset of JSON; NOTE(review): confirm this is intentional, as YAML
+// typing rules differ subtly from encoding/json)
+func JSON(in string) map[string]interface{} {
+ obj := make(map[string]interface{})
+ return unmarshalObj(obj, in, yaml.Unmarshal)
+}
+
+// JSONArray - Unmarshal a JSON Array (also parsed via the YAML unmarshaler)
+func JSONArray(in string) []interface{} {
+ obj := make([]interface{}, 1)
+ return unmarshalArray(obj, in, yaml.Unmarshal)
+}
+
+// YAML - Unmarshal a YAML Object
+func YAML(in string) map[string]interface{} {
+ obj := make(map[string]interface{})
+ return unmarshalObj(obj, in, yaml.Unmarshal)
+}
+
+// YAMLArray - Unmarshal a YAML Array
+func YAMLArray(in string) []interface{} {
+ obj := make([]interface{}, 1)
+ return unmarshalArray(obj, in, yaml.Unmarshal)
+}
+
+// TOML - Unmarshal a TOML Object
+func TOML(in string) interface{} {
+ obj := make(map[string]interface{})
+ return unmarshalObj(obj, in, toml.Unmarshal)
+}
+
+// parseCSV - shared argument-dispatch for the CSV* functions:
+//   1 arg:  (in)
+//   2 args: (delim, in) when args[0] is a single character,
+//           ("", in)    to request auto-named columns (hdr stays empty),
+//           (hdr, in)   otherwise - hdr is split on delim
+//   3 args: (delim, hdr, in)
+// Returns the data records and the header row. When no header was supplied
+// (hdr == nil) the first record is consumed as the header.
+func parseCSV(args ...string) (records [][]string, hdr []string) {
+ delim := ","
+ var in string
+ if len(args) == 1 {
+ in = args[0]
+ }
+ if len(args) == 2 {
+ in = args[1]
+ if len(args[0]) == 1 {
+ delim = args[0]
+ } else if len(args[0]) == 0 {
+ hdr = []string{}
+ } else {
+ hdr = strings.Split(args[0], delim)
+ }
+ }
+ if len(args) == 3 {
+ delim = args[0]
+ hdr = strings.Split(args[1], delim)
+ in = args[2]
+ }
+ c := csv.NewReader(strings.NewReader(in))
+ // only the first character of delim is used as the field separator
+ c.Comma = rune(delim[0])
+ records, err := c.ReadAll()
+ if err != nil {
+ log.Fatal(err)
+ }
+ if hdr == nil {
+ // no header given - take the first record as the header
+ // NOTE(review): records[0] panics if the input contained no rows
+ hdr = records[0]
+ records = records[1:]
+ } else if len(hdr) == 0 {
+ // "" was passed for hdr - auto-generate column names (A, B, ...)
+ hdr = make([]string, len(records[0]))
+ for i := range hdr {
+ hdr[i] = autoIndex(i)
+ }
+ }
+ return records, hdr
+}
+
+// autoIndex - calculates a default string column name given a numeric value
+// Indexes 0-25 map to "A".."Z"; past that the letter is simply repeated once
+// per extra multiple of 26 (26 -> "AA", 51 -> "ZZ", 52 -> "AAA"), which is
+// NOT spreadsheet-style ("AB") naming.
+func autoIndex(i int) string {
+ s := ""
+ for n := 0; n <= i/26; n++ {
+ s += string('A' + i%26)
+ }
+ return s
+}
+
+// CSV - Unmarshal CSV
+// parameters:
+// delim - (optional) the (single-character!) field delimiter, defaults to ","
+// in - the CSV-format string to parse
+// returns:
+// an array of rows, which are arrays of cells (strings)
+func CSV(args ...string) [][]string {
+ records, hdr := parseCSV(args...)
+ // re-attach the header as row 0: grow by one, shift everything right
+ records = append(records, nil)
+ copy(records[1:], records)
+ records[0] = hdr
+ return records
+}
+
+// CSVByRow - Unmarshal CSV in a row-oriented form
+// parameters:
+// delim - (optional) the (single-character!) field delimiter, defaults to ","
+// hdr - (optional) comma-separated list of column names,
+// set to "" to get auto-named columns (A-Z), omit
+// to use the first line
+// in - the CSV-format string to parse
+// returns:
+// an array of rows, indexed by the header name
+func CSVByRow(args ...string) (rows []map[string]string) {
+ records, hdr := parseCSV(args...)
+ for _, record := range records {
+ // build one map per data row, keyed by the column name
+ m := make(map[string]string)
+ for i, v := range record {
+ m[hdr[i]] = v
+ }
+ rows = append(rows, m)
+ }
+ return rows
+}
+
+// CSVByColumn - Unmarshal CSV in a Columnar form
+// parameters:
+// delim - (optional) the (single-character!) field delimiter, defaults to ","
+// hdr - (optional) comma-separated list of column names,
+// set to "" to get auto-named columns (A-Z), omit
+// to use the first line
+// in - the CSV-format string to parse
+// returns:
+// a map of columns, indexed by the header name. values are arrays of strings
+func CSVByColumn(args ...string) (cols map[string][]string) {
+ records, hdr := parseCSV(args...)
+ cols = make(map[string][]string)
+ for _, record := range records {
+ for i, v := range record {
+ cols[hdr[i]] = append(cols[hdr[i]], v)
+ }
+ }
+ return cols
+}
+
+// ToCSV - Marshal a [][]string to a CSV document. Accepts either (in) or
+// (delim, in); only the first character of delim is used as the separator.
+// Output uses CRLF line endings (RFC 4180 style). Exits via log.Fatal on
+// bad argument types or write errors.
+func ToCSV(args ...interface{}) string {
+ delim := ","
+ var in [][]string
+ if len(args) == 2 {
+ d, ok := args[0].(string)
+ if ok {
+ delim = d
+ } else {
+ log.Fatalf("Can't parse ToCSV delimiter (%v) - must be string (is a %T)", args[0], args[0])
+ }
+ in, ok = args[1].([][]string)
+ if !ok {
+ log.Fatal("Can't parse ToCSV input - must be of type [][]string")
+ }
+ }
+ if len(args) == 1 {
+ var ok bool
+ in, ok = args[0].([][]string)
+ if !ok {
+ log.Fatal("Can't parse ToCSV input - must be of type [][]string")
+ }
+ }
+ b := &bytes.Buffer{}
+ c := csv.NewWriter(b)
+ c.Comma = rune(delim[0])
+ // We output RFC4180 CSV, so force this to CRLF
+ c.UseCRLF = true
+ err := c.WriteAll(in)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return string(b.Bytes())
+}
+
+// marshalObj - marshal obj with the supplied marshal function, exiting the
+// process (log.Fatalf) on failure.
+func marshalObj(obj interface{}, f func(interface{}) ([]byte, error)) string {
+ b, err := f(obj)
+ if err != nil {
+ log.Fatalf("Unable to marshal object %s: %v", obj, err)
+ }
+
+ return string(b)
+}
+
+// toJSONBytes - encode `in` as JSON using the codec package with Canonical
+// mode enabled, so map keys are emitted in sorted order (deterministic
+// output). Exits the process on encode failure.
+func toJSONBytes(in interface{}) []byte {
+ h := &codec.JsonHandle{}
+ h.Canonical = true
+ buf := new(bytes.Buffer)
+ err := codec.NewEncoder(buf, h).Encode(in)
+ if err != nil {
+ log.Fatalf("Unable to marshal %s: %v", in, err)
+ }
+ return buf.Bytes()
+}
+
+// ToJSON - Stringify a struct as JSON
+func ToJSON(in interface{}) string {
+ return string(toJSONBytes(in))
+}
+
+// ToJSONPretty - Stringify a struct as JSON (indented)
+// Encodes canonically first, then re-indents the result with json.Indent.
+func ToJSONPretty(indent string, in interface{}) string {
+ out := new(bytes.Buffer)
+ b := toJSONBytes(in)
+ err := json.Indent(out, b, "", indent)
+ if err != nil {
+ log.Fatalf("Unable to indent JSON %s: %v", b, err)
+ }
+
+ return string(out.Bytes())
+}
+
+// ToYAML - Stringify a struct as YAML
+func ToYAML(in interface{}) string {
+ return marshalObj(in, yaml.Marshal)
+}
+
+// ToTOML - Stringify a struct as TOML
+func ToTOML(in interface{}) string {
+ buf := new(bytes.Buffer)
+ err := toml.NewEncoder(buf).Encode(in)
+ if err != nil {
+ log.Fatalf("Unable to marshal %s: %v", in, err)
+ }
+ return string(buf.Bytes())
+}
diff --git a/data/data_test.go b/data/data_test.go
new file mode 100644
index 00000000..eb284940
--- /dev/null
+++ b/data/data_test.go
@@ -0,0 +1,319 @@
+package data
+
+import (
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUnmarshalObj(t *testing.T) {
+ expected := map[string]interface{}{
+ "foo": map[interface{}]interface{}{"bar": "baz"},
+ "one": 1.0,
+ "true": true,
+ }
+
+ test := func(actual map[string]interface{}) {
+ assert.Equal(t, expected["foo"], actual["foo"])
+ assert.Equal(t, expected["one"], actual["one"])
+ assert.Equal(t, expected["true"], actual["true"])
+ }
+ test(JSON(`{"foo":{"bar":"baz"},"one":1.0,"true":true}`))
+ test(YAML(`foo:
+ bar: baz
+one: 1.0
+true: true
+`))
+}
+
+func TestUnmarshalArray(t *testing.T) {
+
+ expected := []string{"foo", "bar"}
+
+ test := func(actual []interface{}) {
+ assert.Equal(t, expected[0], actual[0])
+ assert.Equal(t, expected[1], actual[1])
+ }
+ test(JSONArray(`["foo","bar"]`))
+ test(YAMLArray(`
+- foo
+- bar
+`))
+}
+
+func TestToJSON(t *testing.T) {
+ expected := `{"down":{"the":{"rabbit":{"hole":true}}},"foo":"bar","one":1,"true":true}`
+ in := map[string]interface{}{
+ "foo": "bar",
+ "one": 1,
+ "true": true,
+ "down": map[interface{}]interface{}{
+ "the": map[interface{}]interface{}{
+ "rabbit": map[interface{}]interface{}{
+ "hole": true,
+ },
+ },
+ },
+ }
+ assert.Equal(t, expected, ToJSON(in))
+}
+
+func TestToJSONPretty(t *testing.T) {
+ expected := `{
+ "down": {
+ "the": {
+ "rabbit": {
+ "hole": true
+ }
+ }
+ },
+ "foo": "bar",
+ "one": 1,
+ "true": true
+}`
+ in := map[string]interface{}{
+ "foo": "bar",
+ "one": 1,
+ "true": true,
+ "down": map[string]interface{}{
+ "the": map[string]interface{}{
+ "rabbit": map[string]interface{}{
+ "hole": true,
+ },
+ },
+ },
+ }
+ assert.Equal(t, expected, ToJSONPretty(" ", in))
+}
+
+func TestToYAML(t *testing.T) {
+ expected := `d: 2006-01-02T15:04:05.999999999-07:00
+foo: bar
+? |-
+ multi
+ line
+ key
+: hello: world
+one: 1
+"true": true
+`
+ mst, _ := time.LoadLocation("MST")
+ in := map[string]interface{}{
+ "foo": "bar",
+ "one": 1,
+ "true": true,
+ `multi
+line
+key`: map[string]interface{}{
+ "hello": "world",
+ },
+ "d": time.Date(2006, time.January, 2, 15, 4, 5, 999999999, mst),
+ }
+ assert.Equal(t, expected, ToYAML(in))
+}
+
+func TestCSV(t *testing.T) {
+ in := "first,second,third\n1,2,3\n4,5,6"
+ expected := [][]string{
+ {"first", "second", "third"},
+ {"1", "2", "3"},
+ {"4", "5", "6"},
+ }
+ assert.Equal(t, expected, CSV(in))
+
+ in = "first;second;third\r\n1;2;3\r\n4;5;6\r\n"
+ assert.Equal(t, expected, CSV(";", in))
+}
+
+func TestCSVByRow(t *testing.T) {
+ in := "first,second,third\n1,2,3\n4,5,6"
+ expected := []map[string]string{
+ {
+ "first": "1",
+ "second": "2",
+ "third": "3",
+ },
+ {
+ "first": "4",
+ "second": "5",
+ "third": "6",
+ },
+ }
+ assert.Equal(t, expected, CSVByRow(in))
+
+ in = "1,2,3\n4,5,6"
+ assert.Equal(t, expected, CSVByRow("first,second,third", in))
+
+ in = "1;2;3\n4;5;6"
+ assert.Equal(t, expected, CSVByRow(";", "first;second;third", in))
+
+ in = "first;second;third\r\n1;2;3\r\n4;5;6"
+ assert.Equal(t, expected, CSVByRow(";", in))
+
+ expected = []map[string]string{
+ {"A": "1", "B": "2", "C": "3"},
+ {"A": "4", "B": "5", "C": "6"},
+ }
+
+ in = "1,2,3\n4,5,6"
+ assert.Equal(t, expected, CSVByRow("", in))
+
+ expected = []map[string]string{
+ {"A": "1", "B": "1", "C": "1", "D": "1", "E": "1", "F": "1", "G": "1", "H": "1", "I": "1", "J": "1", "K": "1", "L": "1", "M": "1", "N": "1", "O": "1", "P": "1", "Q": "1", "R": "1", "S": "1", "T": "1", "U": "1", "V": "1", "W": "1", "X": "1", "Y": "1", "Z": "1", "AA": "1", "BB": "1", "CC": "1", "DD": "1"},
+ }
+
+ in = "1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1"
+ assert.Equal(t, expected, CSVByRow("", in))
+}
+
+func TestCSVByColumn(t *testing.T) {
+ in := "first,second,third\n1,2,3\n4,5,6"
+ expected := map[string][]string{
+ "first": {"1", "4"},
+ "second": {"2", "5"},
+ "third": {"3", "6"},
+ }
+ assert.Equal(t, expected, CSVByColumn(in))
+
+ in = "1,2,3\n4,5,6"
+ assert.Equal(t, expected, CSVByColumn("first,second,third", in))
+
+ in = "1;2;3\n4;5;6"
+ assert.Equal(t, expected, CSVByColumn(";", "first;second;third", in))
+
+ in = "first;second;third\r\n1;2;3\r\n4;5;6"
+ assert.Equal(t, expected, CSVByColumn(";", in))
+
+ expected = map[string][]string{
+ "A": {"1", "4"},
+ "B": {"2", "5"},
+ "C": {"3", "6"},
+ }
+
+ in = "1,2,3\n4,5,6"
+ assert.Equal(t, expected, CSVByColumn("", in))
+}
+
+func TestAutoIndex(t *testing.T) {
+ assert.Equal(t, "A", autoIndex(0))
+ assert.Equal(t, "B", autoIndex(1))
+ assert.Equal(t, "Z", autoIndex(25))
+ assert.Equal(t, "AA", autoIndex(26))
+ assert.Equal(t, "ZZ", autoIndex(51))
+ assert.Equal(t, "AAA", autoIndex(52))
+ assert.Equal(t, "YYYYY", autoIndex(128))
+}
+
+func TestToCSV(t *testing.T) {
+ in := [][]string{
+ {"first", "second", "third"},
+ {"1", "2", "3"},
+ {"4", "5", "6"},
+ }
+ expected := "first,second,third\r\n1,2,3\r\n4,5,6\r\n"
+
+ assert.Equal(t, expected, ToCSV(in))
+
+ expected = "first;second;third\r\n1;2;3\r\n4;5;6\r\n"
+
+ assert.Equal(t, expected, ToCSV(";", in))
+}
+
+func TestTOML(t *testing.T) {
+ in := `# This is a TOML document. Boom.
+
+title = "TOML Example"
+
+[owner]
+name = "Tom Preston-Werner"
+organization = "GitHub"
+bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
+dob = 1979-05-27T07:32:00Z # First class dates? Why not?
+
+[database]
+server = "192.168.1.1"
+ports = [ 8001, 8001, 8002 ]
+connection_max = 5000
+enabled = true
+
+[servers]
+
+ # You can indent as you please. Tabs or spaces. TOML don't care.
+ [servers.alpha]
+ ip = "10.0.0.1"
+ dc = "eqdc10"
+
+ [servers.beta]
+ ip = "10.0.0.2"
+ dc = "eqdc10"
+
+[clients]
+data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
+
+# Line breaks are OK when inside arrays
+hosts = [
+ "alpha",
+ "omega"
+]
+`
+ expected := map[string]interface{}{
+ "title": "TOML Example",
+ "owner": map[string]interface{}{
+ "name": "Tom Preston-Werner",
+ "organization": "GitHub",
+ "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
+ "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
+ },
+ "database": map[string]interface{}{
+ "server": "192.168.1.1",
+ "ports": []interface{}{int64(8001), int64(8001), int64(8002)},
+ "connection_max": int64(5000),
+ "enabled": true,
+ },
+ "servers": map[string]interface{}{
+ "alpha": map[string]interface{}{
+ "ip": "10.0.0.1",
+ "dc": "eqdc10",
+ },
+ "beta": map[string]interface{}{
+ "ip": "10.0.0.2",
+ "dc": "eqdc10",
+ },
+ },
+ "clients": map[string]interface{}{
+ "data": []interface{}{
+ []interface{}{"gamma", "delta"},
+ []interface{}{int64(1), int64(2)},
+ },
+ "hosts": []interface{}{"alpha", "omega"},
+ },
+ }
+
+ assert.Equal(t, expected, TOML(in))
+}
+
+func TestToTOML(t *testing.T) {
+ expected := `foo = "bar"
+one = 1
+true = true
+
+[down]
+ [down.the]
+ [down.the.rabbit]
+ hole = true
+`
+ in := map[string]interface{}{
+ "foo": "bar",
+ "one": 1,
+ "true": true,
+ "down": map[interface{}]interface{}{
+ "the": map[interface{}]interface{}{
+ "rabbit": map[interface{}]interface{}{
+ "hole": true,
+ },
+ },
+ },
+ }
+ assert.Equal(t, expected, ToTOML(in))
+}
diff --git a/data/datasource.go b/data/datasource.go
new file mode 100644
index 00000000..5e626e0c
--- /dev/null
+++ b/data/datasource.go
@@ -0,0 +1,452 @@
+package data
+
+import (
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "mime"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/blang/vfs"
+ "github.com/hairyhenderson/gomplate/libkv"
+ "github.com/hairyhenderson/gomplate/vault"
+)
+
+// logFatalf is defined so log.Fatalf calls can be overridden for testing
+var logFatalf = log.Fatalf
+
+// regExtension - register a file-extension-to-MIME-type mapping, aborting
+// the process if registration fails.
+func regExtension(ext, typ string) {
+ err := mime.AddExtensionType(ext, typ)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
+
+func init() {
+ // Add some types we want to be able to handle which can be missing by default
+ regExtension(".json", "application/json")
+ regExtension(".yml", "application/yaml")
+ regExtension(".yaml", "application/yaml")
+ regExtension(".csv", "text/csv")
+ regExtension(".toml", "application/toml")
+
+ sourceReaders = make(map[string]func(*Source, ...string) ([]byte, error))
+
+ // Register our source-reader functions
+ addSourceReader("http", readHTTP)
+ addSourceReader("https", readHTTP)
+ addSourceReader("file", readFile)
+ addSourceReader("vault", readVault)
+ addSourceReader("consul", readConsul)
+ addSourceReader("consul+http", readConsul)
+ addSourceReader("consul+https", readConsul)
+ addSourceReader("boltdb", readBoltDB)
+}
+
+// sourceReaders maps a URL scheme to the function that reads that kind of
+// datasource; populated in init and via addSourceReader.
+var sourceReaders map[string]func(*Source, ...string) ([]byte, error)
+
+// addSourceReader - register the reader function for a URL scheme
+func addSourceReader(scheme string, readFunc func(*Source, ...string) ([]byte, error)) {
+ sourceReaders[scheme] = readFunc
+}
+
+// Data - holds the named datasources plus a cache of raw bytes already read
+// from them (see ReadSource).
+type Data struct {
+ Sources map[string]*Source
+ cache map[string][]byte
+}
+
+// Cleanup - clean up datasources before shutting the process down - things
+// like Logging out happen here
+func (d *Data) Cleanup() {
+ for _, s := range d.Sources {
+ s.cleanup()
+ }
+}
+
+// NewData - constructor for Data
+// datasourceArgs are "alias=URL" (or bare filename) strings; headerArgs are
+// "alias=Name: value" strings, and a header is attached only to the source
+// whose alias matches. Exits via log.Fatalf on an unparseable datasource.
+func NewData(datasourceArgs []string, headerArgs []string) *Data {
+ sources := make(map[string]*Source)
+ headers := parseHeaderArgs(headerArgs)
+ for _, v := range datasourceArgs {
+ s, err := ParseSource(v)
+ if err != nil {
+ log.Fatalf("error parsing datasource %v", err)
+ return nil
+ }
+ s.Header = headers[s.Alias]
+ sources[s.Alias] = s
+ }
+ return &Data{
+ Sources: sources,
+ }
+}
+
+// Source - a data source
+type Source struct {
+ Alias string
+ URL *url.URL
+ Ext string
+ Type string
+ Params map[string]string
+ FS vfs.Filesystem // used for file: URLs, nil otherwise
+ HC *http.Client // used for http[s]: URLs, nil otherwise
+ VC *vault.Vault // used for vault: URLs, nil otherwise
+ KV *libkv.LibKV // used for consul:, etcd:, zookeeper: & boltdb: URLs, nil otherwise
+ Header http.Header // used for http[s]: URLs, nil otherwise
+}
+
+// cleanup - log out of any clients that hold a session (vault, libkv)
+func (s *Source) cleanup() {
+ if s.VC != nil {
+ s.VC.Logout()
+ }
+ if s.KV != nil {
+ s.KV.Logout()
+ }
+}
+
+// NewSource - builds a &Source
+// The MIME type is inferred from the URL path's file extension, except for
+// boltdb: URLs where the path names the database file, not the content.
+func NewSource(alias string, URL *url.URL) (s *Source) {
+ ext := filepath.Ext(URL.Path)
+
+ s = &Source{
+ Alias: alias,
+ URL: URL,
+ Ext: ext,
+ }
+
+ if ext != "" && URL.Scheme != "boltdb" {
+ mediatype := mime.TypeByExtension(ext)
+ t, params, err := mime.ParseMediaType(mediatype)
+ if err != nil {
+ log.Fatal(err)
+ }
+ s.Type = t
+ s.Params = params
+ }
+ return
+}
+
+// String is the method to format the flag's value, part of the flag.Value interface.
+// The String method's output will be used in diagnostics.
+func (s *Source) String() string {
+ return fmt.Sprintf("%s=%s (%s)", s.Alias, s.URL.String(), s.Type)
+}
+
+// ParseSource - parse a datasource argument of the form "alias=URL" or a
+// bare filename (in which case the alias is the filename up to the first
+// "." and the file must live in the working directory). Relative URLs are
+// resolved to absolute file: URLs against the current directory.
+func ParseSource(value string) (*Source, error) {
+ var (
+ alias string
+ srcURL *url.URL
+ )
+ parts := strings.SplitN(value, "=", 2)
+ if len(parts) == 1 {
+ // bare filename form - derive the alias from the name
+ f := parts[0]
+ alias = strings.SplitN(value, ".", 2)[0]
+ if path.Base(f) != f {
+ err := fmt.Errorf("Invalid datasource (%s). Must provide an alias with files not in working directory", value)
+ return nil, err
+ }
+ srcURL = absURL(f)
+ } else if len(parts) == 2 {
+ alias = parts[0]
+ var err error
+ srcURL, err = url.Parse(parts[1])
+ if err != nil {
+ return nil, err
+ }
+
+ if !srcURL.IsAbs() {
+ srcURL = absURL(parts[1])
+ }
+ }
+
+ s := NewSource(alias, srcURL)
+ return s, nil
+}
+
+// absURL - convert a relative path into an absolute file: URL, resolved
+// against the current working directory (path separators normalized to /).
+func absURL(value string) *url.URL {
+ cwd, err := os.Getwd()
+ if err != nil {
+ log.Fatalf("Can't get working directory: %s", err)
+ }
+ urlCwd := strings.Replace(cwd, string(os.PathSeparator), "/", -1)
+ baseURL := &url.URL{
+ Scheme: "file",
+ Path: urlCwd + "/",
+ }
+ relURL := &url.URL{
+ Path: value,
+ }
+ return baseURL.ResolveReference(relURL)
+}
+
+// DatasourceExists - report whether a datasource with the given alias is defined
+func (d *Data) DatasourceExists(alias string) bool {
+ _, ok := d.Sources[alias]
+ return ok
+}
+
+const plaintext = "text/plain"
+
+// Datasource - read the named datasource and unmarshal it according to its
+// MIME type (JSON/YAML/CSV/TOML; text/plain is returned as a string).
+// Exits via log.Fatalf on unknown alias, read failure, or unsupported type.
+func (d *Data) Datasource(alias string, args ...string) interface{} {
+ source, ok := d.Sources[alias]
+ if !ok {
+ log.Fatalf("Undefined datasource '%s'", alias)
+ }
+ b, err := d.ReadSource(source, args...)
+ if err != nil {
+ log.Fatalf("Couldn't read datasource '%s': %s", alias, err)
+ }
+ s := string(b)
+ if source.Type == "application/json" {
+ return JSON(s)
+ }
+ if source.Type == "application/yaml" {
+ return YAML(s)
+ }
+ if source.Type == "text/csv" {
+ return CSV(s)
+ }
+ if source.Type == "application/toml" {
+ return TOML(s)
+ }
+ if source.Type == plaintext {
+ return s
+ }
+ log.Fatalf("Datasources of type %s not yet supported", source.Type)
+ return nil
+}
+
+// Include - read the named datasource and return its raw content as a
+// string, without any unmarshalling.
+func (d *Data) Include(alias string, args ...string) string {
+ source, ok := d.Sources[alias]
+ if !ok {
+ log.Fatalf("Undefined datasource '%s'", alias)
+ }
+ b, err := d.ReadSource(source, args...)
+ if err != nil {
+ log.Fatalf("Couldn't read datasource '%s': %s", alias, err)
+ }
+ return string(b)
+}
+
+// ReadSource - read the raw bytes for a source via the reader registered
+// for its URL scheme, caching the result per (alias, args).
+func (d *Data) ReadSource(source *Source, args ...string) ([]byte, error) {
+ if d.cache == nil {
+ d.cache = make(map[string][]byte)
+ }
+ // NOTE(review): args are concatenated without a separator, so
+ // ("a","bc") and ("ab","c") share a cache key - collision risk
+ cacheKey := source.Alias
+ for _, v := range args {
+ cacheKey += v
+ }
+ cached, ok := d.cache[cacheKey]
+ if ok {
+ return cached, nil
+ }
+ if r, ok := sourceReaders[source.URL.Scheme]; ok {
+ data, err := r(source, args...)
+ if err != nil {
+ return nil, err
+ }
+ d.cache[cacheKey] = data
+ return data, nil
+ }
+
+ log.Fatalf("Datasources with scheme %s not yet supported", source.URL.Scheme)
+ return nil, nil
+}
+
+// readFile - read the source's file from its (possibly virtual) filesystem.
+// NOTE(review): the log.Fatalf calls terminate the process, making the
+// `return nil, err` lines after them unreachable.
+func readFile(source *Source, args ...string) ([]byte, error) {
+ if source.FS == nil {
+ // default to the real OS filesystem; tests inject a memfs here
+ source.FS = vfs.OS()
+ }
+
+ p := filepath.FromSlash(source.URL.Path)
+
+ // make sure we can access the file
+ _, err := source.FS.Stat(p)
+ if err != nil {
+ log.Fatalf("Can't stat %s: %#v", p, err)
+ return nil, err
+ }
+
+ f, err := source.FS.OpenFile(p, os.O_RDONLY, 0)
+ if err != nil {
+ log.Fatalf("Can't open %s: %#v", p, err)
+ return nil, err
+ }
+
+ b, err := ioutil.ReadAll(f)
+ if err != nil {
+ log.Fatalf("Can't read %s: %#v", p, err)
+ return nil, err
+ }
+ return b, nil
+}
+
+// readHTTP - GET the source's URL (with the source's configured headers),
+// lazily creating a client with a 5s timeout. On success the source's Type
+// and Params are updated from the response's Content-Type header.
+func readHTTP(source *Source, args ...string) ([]byte, error) {
+ if source.HC == nil {
+ source.HC = &http.Client{Timeout: time.Second * 5}
+ }
+ req, err := http.NewRequest("GET", source.URL.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+ req.Header = source.Header
+ res, err := source.HC.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ // read and close the body before inspecting the status, so the error
+ // message below can include the response body
+ body, err := ioutil.ReadAll(res.Body)
+ if err != nil {
+ return nil, err
+ }
+ err = res.Body.Close()
+ if err != nil {
+ return nil, err
+ }
+ if res.StatusCode != 200 {
+ err := fmt.Errorf("Unexpected HTTP status %d on GET from %s: %s", res.StatusCode, source.URL, string(body))
+ return nil, err
+ }
+ ctypeHdr := res.Header.Get("Content-Type")
+ if ctypeHdr != "" {
+ mediatype, params, e := mime.ParseMediaType(ctypeHdr)
+ if e != nil {
+ return nil, e
+ }
+ source.Type = mediatype
+ source.Params = params
+ }
+ return body, nil
+}
+
+// readVault - read (or, when parameters are present, write) a Vault path.
+// Query parameters on the source URL and on the optional extra-arg URL are
+// merged; an extra arg's path is appended to the source path. The result is
+// always JSON, so the source Type is forced to application/json.
+func readVault(source *Source, args ...string) ([]byte, error) {
+ if source.VC == nil {
+ // lazily create and log in a Vault client on first use
+ source.VC = vault.New()
+ source.VC.Login()
+ }
+
+ params := make(map[string]interface{})
+
+ p := source.URL.Path
+
+ for key, val := range source.URL.Query() {
+ params[key] = strings.Join(val, " ")
+ }
+
+ if len(args) == 1 {
+ parsed, err := url.Parse(args[0])
+ if err != nil {
+ return nil, err
+ }
+
+ if parsed.Path != "" {
+ p = p + "/" + parsed.Path
+ }
+
+ for key, val := range parsed.Query() {
+ params[key] = strings.Join(val, " ")
+ }
+ }
+
+ var data []byte
+ var err error
+
+ // with parameters this is a write (e.g. dynamic secrets); otherwise a read
+ if len(params) > 0 {
+ data, err = source.VC.Write(p, params)
+ } else {
+ data, err = source.VC.Read(p)
+ }
+ if err != nil {
+ return nil, err
+ }
+ source.Type = "application/json"
+
+ return data, nil
+}
+
+// readConsul - read a key from Consul's KV store. An optional single arg is
+// appended to the source URL's path. Values are treated as plain text.
+func readConsul(source *Source, args ...string) ([]byte, error) {
+ if source.KV == nil {
+ source.KV = libkv.NewConsul(source.URL)
+ err := source.KV.Login()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ p := source.URL.Path
+ if len(args) == 1 {
+ p = p + "/" + args[0]
+ }
+
+ data, err := source.KV.Read(p)
+ if err != nil {
+ return nil, err
+ }
+ source.Type = plaintext
+
+ return data, nil
+}
+
+// readBoltDB - read a key from a BoltDB file. Unlike readConsul, the key
+// must be supplied as the single extra argument. Values are plain text.
+func readBoltDB(source *Source, args ...string) ([]byte, error) {
+ if source.KV == nil {
+ source.KV = libkv.NewBoltDB(source.URL)
+ }
+
+ if len(args) != 1 {
+ return nil, errors.New("missing key")
+ }
+ p := args[0]
+
+ data, err := source.KV.Read(p)
+ if err != nil {
+ return nil, err
+ }
+ source.Type = plaintext
+
+ return data, nil
+}
+
+// parseHeaderArgs - parse "alias=Name: value" strings into per-datasource
+// http.Header maps. Repeated headers for the same alias/name accumulate.
+func parseHeaderArgs(headerArgs []string) map[string]http.Header {
+ headers := make(map[string]http.Header)
+ for _, v := range headerArgs {
+ ds, name, value := splitHeaderArg(v)
+ if _, ok := headers[ds]; !ok {
+ headers[ds] = make(http.Header)
+ }
+ headers[ds][name] = append(headers[ds][name], strings.TrimSpace(value))
+ }
+ return headers
+}
+
+// splitHeaderArg - split "alias=Name: value" into its alias and header
+// parts; calls logFatalf (overridable in tests) on malformed input.
+func splitHeaderArg(arg string) (datasourceAlias, name, value string) {
+ parts := strings.SplitN(arg, "=", 2)
+ if len(parts) != 2 {
+ logFatalf("Invalid datasource-header option '%s'", arg)
+ return "", "", ""
+ }
+ datasourceAlias = parts[0]
+ name, value = splitHeader(parts[1])
+ return datasourceAlias, name, value
+}
+
+// splitHeader - split "Name: value" into a canonicalized header name and
+// its (untrimmed) value; calls logFatalf on malformed input.
+func splitHeader(header string) (name, value string) {
+ parts := strings.SplitN(header, ":", 2)
+ if len(parts) != 2 {
+ logFatalf("Invalid HTTP Header format '%s'", header)
+ return "", ""
+ }
+ name = http.CanonicalHeaderKey(parts[0])
+ value = parts[1]
+ return name, value
+}
diff --git a/data/datasource_test.go b/data/datasource_test.go
new file mode 100644
index 00000000..31b66eee
--- /dev/null
+++ b/data/datasource_test.go
@@ -0,0 +1,300 @@
+// +build !windows
+
+package data
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "testing"
+
+ "github.com/blang/vfs"
+ "github.com/blang/vfs/memfs"
+ "github.com/stretchr/testify/assert"
+)
+
+// spyLogFatalfMsg records the last message passed to the mocked logFatalf
+var spyLogFatalfMsg string
+
+// restoreLogFatalf - reinstate the real log.Fatalf after a test
+func restoreLogFatalf() {
+ logFatalf = log.Fatalf
+}
+
+// mockLogFatalf - record the message and panic instead of exiting, so
+// tests can assert on fatal paths via assert.Panics
+func mockLogFatalf(msg string, args ...interface{}) {
+ spyLogFatalfMsg = msg
+ panic(spyLogFatalfMsg)
+}
+
+// setupMockLogFatalf - install the mock and clear the recorded message
+func setupMockLogFatalf() {
+ logFatalf = mockLogFatalf
+ spyLogFatalfMsg = ""
+}
+
+func TestNewSource(t *testing.T) {
+ s := NewSource("foo", &url.URL{
+ Scheme: "file",
+ Path: "/foo.json",
+ })
+ assert.Equal(t, "application/json", s.Type)
+ assert.Equal(t, ".json", s.Ext)
+
+ s = NewSource("foo", &url.URL{
+ Scheme: "http",
+ Host: "example.com",
+ Path: "/foo.json",
+ })
+ assert.Equal(t, "application/json", s.Type)
+ assert.Equal(t, ".json", s.Ext)
+
+ s = NewSource("foo", &url.URL{
+ Scheme: "ftp",
+ Host: "example.com",
+ Path: "/foo.json",
+ })
+ assert.Equal(t, "application/json", s.Type)
+ assert.Equal(t, ".json", s.Ext)
+}
+
+func TestNewData(t *testing.T) {
+ d := NewData(nil, nil)
+ assert.Len(t, d.Sources, 0)
+
+ d = NewData([]string{"foo=http:///foo.json"}, nil)
+ assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
+
+ d = NewData([]string{"foo=http:///foo.json"}, []string{})
+ assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
+ assert.Empty(t, d.Sources["foo"].Header)
+
+ d = NewData([]string{"foo=http:///foo.json"}, []string{"bar=Accept: blah"})
+ assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
+ assert.Empty(t, d.Sources["foo"].Header)
+
+ d = NewData([]string{"foo=http:///foo.json"}, []string{"foo=Accept: blah"})
+ assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
+ assert.Equal(t, "blah", d.Sources["foo"].Header["Accept"][0])
+}
+
+func TestParseSourceNoAlias(t *testing.T) {
+ s, err := ParseSource("foo.json")
+ assert.NoError(t, err)
+ assert.Equal(t, "foo", s.Alias)
+
+ _, err = ParseSource("../foo.json")
+ assert.Error(t, err)
+
+ _, err = ParseSource("ftp://example.com/foo.yml")
+ assert.Error(t, err)
+}
+
+func TestParseSourceWithAlias(t *testing.T) {
+ s, err := ParseSource("data=foo.json")
+ assert.NoError(t, err)
+ assert.Equal(t, "data", s.Alias)
+ assert.Equal(t, "file", s.URL.Scheme)
+ assert.Equal(t, "application/json", s.Type)
+ assert.True(t, s.URL.IsAbs())
+
+ s, err = ParseSource("data=/otherdir/foo.json")
+ assert.NoError(t, err)
+ assert.Equal(t, "data", s.Alias)
+ assert.Equal(t, "file", s.URL.Scheme)
+ assert.True(t, s.URL.IsAbs())
+ assert.Equal(t, "/otherdir/foo.json", s.URL.Path)
+
+ s, err = ParseSource("data=sftp://example.com/blahblah/foo.json")
+ assert.NoError(t, err)
+ assert.Equal(t, "data", s.Alias)
+ assert.Equal(t, "sftp", s.URL.Scheme)
+ assert.True(t, s.URL.IsAbs())
+ assert.Equal(t, "/blahblah/foo.json", s.URL.Path)
+}
+
+func TestDatasource(t *testing.T) {
+ test := func(ext, mime, contents string) {
+ fname := "foo." + ext
+ fs := memfs.Create()
+ _ = fs.Mkdir("/tmp", 0777)
+ f, _ := vfs.Create(fs, "/tmp/"+fname)
+ _, _ = f.Write([]byte(contents))
+
+ sources := map[string]*Source{
+ "foo": {
+ Alias: "foo",
+ URL: &url.URL{Scheme: "file", Path: "/tmp/" + fname},
+ Ext: ext,
+ Type: mime,
+ FS: fs,
+ },
+ }
+ data := &Data{
+ Sources: sources,
+ }
+ expected := map[string]interface{}{"hello": map[interface{}]interface{}{"cruel": "world"}}
+ actual := data.Datasource("foo")
+ assert.Equal(t, expected, actual)
+ }
+
+ test("json", "application/json", `{"hello":{"cruel":"world"}}`)
+ test("yml", "application/yaml", "hello:\n cruel: world\n")
+}
+
+func TestDatasourceExists(t *testing.T) {
+ sources := map[string]*Source{
+ "foo": {Alias: "foo"},
+ }
+ data := &Data{Sources: sources}
+ assert.True(t, data.DatasourceExists("foo"))
+ assert.False(t, data.DatasourceExists("bar"))
+}
+
+func setupHTTP(code int, mimetype string, body string) (*httptest.Server, *http.Client) {
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
+ w.Header().Set("Content-Type", mimetype)
+ w.WriteHeader(code)
+ if body == "" {
+ // mirror back the headers
+ fmt.Fprintln(w, marshalObj(r.Header, json.Marshal))
+ } else {
+ fmt.Fprintln(w, body)
+ }
+ }))
+
+ client := &http.Client{
+ Transport: &http.Transport{
+ Proxy: func(req *http.Request) (*url.URL, error) {
+ return url.Parse(server.URL)
+ },
+ },
+ }
+
+ return server, client
+}
+
+func TestHTTPFile(t *testing.T) {
+ server, client := setupHTTP(200, "application/json; charset=utf-8", `{"hello": "world"}`)
+ defer server.Close()
+
+ sources := make(map[string]*Source)
+ sources["foo"] = &Source{
+ Alias: "foo",
+ URL: &url.URL{
+ Scheme: "http",
+ Host: "example.com",
+ Path: "/foo",
+ },
+ HC: client,
+ }
+ data := &Data{
+ Sources: sources,
+ }
+ expected := make(map[string]interface{})
+ expected["hello"] = "world"
+ actual := data.Datasource("foo").(map[string]interface{})
+ assert.Equal(t, expected["hello"], actual["hello"])
+}
+
+func TestHTTPFileWithHeaders(t *testing.T) {
+ server, client := setupHTTP(200, "application/json", "")
+ defer server.Close()
+
+ sources := make(map[string]*Source)
+ sources["foo"] = &Source{
+ Alias: "foo",
+ URL: &url.URL{
+ Scheme: "http",
+ Host: "example.com",
+ Path: "/foo",
+ },
+ HC: client,
+ Header: http.Header{
+ "Foo": {"bar"},
+ "foo": {"baz"},
+ "User-Agent": {},
+ "Accept-Encoding": {"test"},
+ },
+ }
+ data := &Data{
+ Sources: sources,
+ }
+ expected := http.Header{
+ "Accept-Encoding": {"test"},
+ "Foo": {"bar", "baz"},
+ }
+ actual := data.Datasource("foo")
+ assert.Equal(t, marshalObj(expected, json.Marshal), marshalObj(actual, json.Marshal))
+}
+
+func TestParseHeaderArgs(t *testing.T) {
+ args := []string{
+ "foo=Accept: application/json",
+ "bar=Authorization: Bearer supersecret",
+ }
+ expected := map[string]http.Header{
+ "foo": {
+ "Accept": {"application/json"},
+ },
+ "bar": {
+ "Authorization": {"Bearer supersecret"},
+ },
+ }
+ assert.Equal(t, expected, parseHeaderArgs(args))
+
+ defer restoreLogFatalf()
+ setupMockLogFatalf()
+ assert.Panics(t, func() {
+ parseHeaderArgs([]string{"foo"})
+ })
+
+ defer restoreLogFatalf()
+ setupMockLogFatalf()
+ assert.Panics(t, func() {
+ parseHeaderArgs([]string{"foo=bar"})
+ })
+
+ args = []string{
+ "foo=Accept: application/json",
+ "foo=Foo: bar",
+ "foo=foo: baz",
+ "foo=fOO: qux",
+ "bar=Authorization: Bearer supersecret",
+ }
+ expected = map[string]http.Header{
+ "foo": {
+ "Accept": {"application/json"},
+ "Foo": {"bar", "baz", "qux"},
+ },
+ "bar": {
+ "Authorization": {"Bearer supersecret"},
+ },
+ }
+ assert.Equal(t, expected, parseHeaderArgs(args))
+}
+
+func TestInclude(t *testing.T) {
+ ext := "txt"
+ contents := "hello world"
+ fname := "foo." + ext
+ fs := memfs.Create()
+ _ = fs.Mkdir("/tmp", 0777)
+ f, _ := vfs.Create(fs, "/tmp/"+fname)
+ _, _ = f.Write([]byte(contents))
+
+ sources := map[string]*Source{
+ "foo": {
+ Alias: "foo",
+ URL: &url.URL{Scheme: "file", Path: "/tmp/" + fname},
+ Ext: ext,
+ Type: "text/plain",
+ FS: fs,
+ },
+ }
+ data := &Data{
+ Sources: sources,
+ }
+ actual := data.Include("foo")
+ assert.Equal(t, contents, actual)
+}