-rw-r--r--  context.go                                                    28
-rw-r--r--  context_test.go                                               21
-rw-r--r--  data/datasource.go                                           364
-rw-r--r--  data/datasource_test.go                                      397
-rw-r--r--  data/mimetypes.go                                             33
-rw-r--r--  funcs.go                                                       7
-rw-r--r--  gomplate.go                                                    2
-rw-r--r--  gomplate_test.go                                               6
-rw-r--r--  internal/datafs/context.go                                    16
-rw-r--r--  internal/datafs/mergefs.go                                    24
-rw-r--r--  internal/datafs/mergefs_test.go                               28
-rw-r--r--  internal/datafs/reader.go                                    226
-rw-r--r--  internal/datafs/reader_test.go                               162
-rw-r--r--  internal/datafs/registry.go                                   88
-rw-r--r--  internal/datafs/registry_test.go                              48
-rw-r--r--  internal/funcs/data.go                                        12
-rw-r--r--  internal/funcs/data_test.go                                    2
-rw-r--r--  internal/funcs/datasource.go                                 103
-rw-r--r--  internal/funcs/datasource_test.go                            217
-rw-r--r--  internal/iohelpers/mimetypes_test.go (renamed from data/mimetypes_test.go)  10
-rw-r--r--  internal/tests/integration/datasources_vault_ec2_test.go       6
-rw-r--r--  internal/tests/integration/datasources_vault_test.go           2
-rw-r--r--  render.go                                                     37
23 files changed, 940 insertions, 899 deletions
diff --git a/context.go b/context.go
index f98ee3b3..d84a2967 100644
--- a/context.go
+++ b/context.go
@@ -5,7 +5,8 @@ import (
"os"
"strings"
- "github.com/hairyhenderson/gomplate/v4/data"
+ "github.com/hairyhenderson/gomplate/v4/internal/datafs"
+ "github.com/hairyhenderson/gomplate/v4/internal/parsers"
)
// context for templates
@@ -24,26 +25,25 @@ func (c *tmplctx) Env() map[string]string {
// createTmplContext reads the datasources for the given aliases
func createTmplContext(
ctx context.Context, aliases []string,
- //nolint:staticcheck
- d *data.Data,
+ sr datafs.DataSourceReader,
) (interface{}, error) {
- // we need to inject the current context into the Data value, because
- // the Datasource method may need it
- // TODO: remove this before v4
- if d != nil {
- d.Ctx = ctx
- }
-
- var err error
tctx := &tmplctx{}
for _, a := range aliases {
- if a == "." {
- return d.Datasource(a)
+ ct, b, err := sr.ReadSource(ctx, a)
+ if err != nil {
+ return nil, err
}
- (*tctx)[a], err = d.Datasource(a)
+
+ content, err := parsers.ParseData(ct, string(b))
if err != nil {
return nil, err
}
+
+ if a == "." {
+ return content, nil
+ }
+
+ (*tctx)[a] = content
}
return tctx, nil
}
diff --git a/context_test.go b/context_test.go
index a92818d2..903a0fc7 100644
--- a/context_test.go
+++ b/context_test.go
@@ -7,7 +7,6 @@ import (
"testing"
"github.com/hairyhenderson/go-fsimpl"
- "github.com/hairyhenderson/gomplate/v4/data"
"github.com/hairyhenderson/gomplate/v4/internal/config"
"github.com/hairyhenderson/gomplate/v4/internal/datafs"
@@ -30,7 +29,10 @@ func TestEnvGetsUpdatedEnvironment(t *testing.T) {
func TestCreateContext(t *testing.T) {
ctx := context.Background()
- c, err := createTmplContext(ctx, nil, nil)
+ reg := datafs.NewRegistry()
+ sr := datafs.NewSourceReader(reg)
+
+ c, err := createTmplContext(ctx, nil, sr)
require.NoError(t, err)
assert.Empty(t, c)
@@ -43,15 +45,12 @@ func TestCreateContext(t *testing.T) {
barURL := "env:///bar?type=application/yaml"
uf, _ := url.Parse(fooURL)
ub, _ := url.Parse(barURL)
- //nolint:staticcheck
- d := &data.Data{
- Sources: map[string]config.DataSource{
- "foo": {URL: uf},
- ".": {URL: ub},
- },
- }
+
+ reg.Register("foo", config.DataSource{URL: uf})
+ reg.Register(".", config.DataSource{URL: ub})
+
t.Setenv("foo", "foo: bar")
- c, err = createTmplContext(ctx, []string{"foo"}, d)
+ c, err = createTmplContext(ctx, []string{"foo"}, sr)
require.NoError(t, err)
assert.IsType(t, &tmplctx{}, c)
tctx := c.(*tmplctx)
@@ -59,7 +58,7 @@ func TestCreateContext(t *testing.T) {
assert.Equal(t, "bar", ds["foo"])
t.Setenv("bar", "bar: baz")
- c, err = createTmplContext(ctx, []string{"."}, d)
+ c, err = createTmplContext(ctx, []string{"."}, sr)
require.NoError(t, err)
assert.IsType(t, map[string]interface{}{}, c)
ds = c.(map[string]interface{})
diff --git a/data/datasource.go b/data/datasource.go
deleted file mode 100644
index 86e3101c..00000000
--- a/data/datasource.go
+++ /dev/null
@@ -1,364 +0,0 @@
-package data
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "io"
- "io/fs"
- "net/http"
- "net/url"
- "runtime"
- "sort"
- "strings"
-
- "github.com/hairyhenderson/go-fsimpl"
- "github.com/hairyhenderson/gomplate/v4/internal/config"
- "github.com/hairyhenderson/gomplate/v4/internal/datafs"
- "github.com/hairyhenderson/gomplate/v4/internal/parsers"
- "github.com/hairyhenderson/gomplate/v4/internal/urlhelpers"
-)
-
-// Data -
-//
-// Deprecated: will be replaced in future
-type Data struct {
- Ctx context.Context
-
- // TODO: remove this before 4.0
- Sources map[string]config.DataSource
-
- cache map[string]*fileContent
-
- // headers from the --datasource-header/-H option that don't reference datasources from the commandline
- ExtraHeaders map[string]http.Header
-}
-
-type fileContent struct {
- contentType string
- b []byte
-}
-
-// NewData - constructor for Data
-//
-// Deprecated: will be replaced in future
-func NewData(datasourceArgs, headerArgs []string) (*Data, error) {
- cfg := &config.Config{}
- err := cfg.ParseDataSourceFlags(datasourceArgs, nil, nil, headerArgs)
- if err != nil {
- return nil, err
- }
- data := FromConfig(context.Background(), cfg)
- return data, nil
-}
-
-// FromConfig - internal use only!
-func FromConfig(ctx context.Context, cfg *config.Config) *Data {
- // XXX: This is temporary, and will be replaced with something a bit cleaner
- // when datasources are refactored
- ctx = datafs.ContextWithStdin(ctx, cfg.Stdin)
-
- sources := map[string]config.DataSource{}
- for alias, d := range cfg.DataSources {
- sources[alias] = d
- }
- for alias, d := range cfg.Context {
- sources[alias] = d
- }
-
- return &Data{
- Ctx: ctx,
- Sources: sources,
- ExtraHeaders: cfg.ExtraHeaders,
- }
-}
-
-// Source - a data source
-//
-// Deprecated: will be replaced in future
-type Source struct {
- URL *url.URL
- Header http.Header // used for http[s]: URLs, nil otherwise
-}
-
-// String is the method to format the flag's value, part of the flag.Value interface.
-// The String method's output will be used in diagnostics.
-func (s *Source) String() string {
- return s.URL.String()
-}
-
-// DefineDatasource -
-func (d *Data) DefineDatasource(alias, value string) (string, error) {
- if alias == "" {
- return "", fmt.Errorf("datasource alias must be provided")
- }
- if d.DatasourceExists(alias) {
- return "", nil
- }
- srcURL, err := urlhelpers.ParseSourceURL(value)
- if err != nil {
- return "", err
- }
- s := config.DataSource{
- URL: srcURL,
- Header: d.ExtraHeaders[alias],
- }
- if d.Sources == nil {
- d.Sources = make(map[string]config.DataSource)
- }
- d.Sources[alias] = s
- return "", nil
-}
-
-// DatasourceExists -
-func (d *Data) DatasourceExists(alias string) bool {
- _, ok := d.Sources[alias]
- return ok
-}
-
-func (d *Data) lookupSource(alias string) (*config.DataSource, error) {
- source, ok := d.Sources[alias]
- if !ok {
- srcURL, err := url.Parse(alias)
- if err != nil || !srcURL.IsAbs() {
- return nil, fmt.Errorf("undefined datasource '%s': %w", alias, err)
- }
- source = config.DataSource{
- URL: srcURL,
- Header: d.ExtraHeaders[alias],
- }
- d.Sources[alias] = source
- }
-
- return &source, nil
-}
-
-func (d *Data) readDataSource(ctx context.Context, alias string, args ...string) (*fileContent, error) {
- source, err := d.lookupSource(alias)
- if err != nil {
- return nil, err
- }
- fc, err := d.readSource(ctx, alias, source, args...)
- if err != nil {
- return nil, fmt.Errorf("couldn't read datasource '%s': %w", alias, err)
- }
-
- return fc, nil
-}
-
-// Include -
-func (d *Data) Include(alias string, args ...string) (string, error) {
- fc, err := d.readDataSource(d.Ctx, alias, args...)
- if err != nil {
- return "", err
- }
-
- return string(fc.b), err
-}
-
-// Datasource -
-func (d *Data) Datasource(alias string, args ...string) (interface{}, error) {
- fc, err := d.readDataSource(d.Ctx, alias, args...)
- if err != nil {
- return nil, err
- }
-
- return parsers.ParseData(fc.contentType, string(fc.b))
-}
-
-// DatasourceReachable - Determines if the named datasource is reachable with
-// the given arguments. Reads from the datasource, and discards the returned data.
-func (d *Data) DatasourceReachable(alias string, args ...string) bool {
- source, ok := d.Sources[alias]
- if !ok {
- return false
- }
- _, err := d.readSource(d.Ctx, alias, &source, args...)
- return err == nil
-}
-
-// readSource returns the (possibly cached) data from the given source,
-// as referenced by the given args
-func (d *Data) readSource(ctx context.Context, alias string, source *config.DataSource, args ...string) (*fileContent, error) {
- if d.cache == nil {
- d.cache = make(map[string]*fileContent)
- }
- cacheKey := alias
- for _, v := range args {
- cacheKey += v
- }
- cached, ok := d.cache[cacheKey]
- if ok {
- return cached, nil
- }
-
- arg := ""
- if len(args) > 0 {
- arg = args[0]
- }
- u, err := resolveURL(source.URL, arg)
- if err != nil {
- return nil, err
- }
-
- fc, err := d.readFileContent(ctx, u, source.Header)
- if err != nil {
- return nil, fmt.Errorf("reading %s: %w", u, err)
- }
- d.cache[cacheKey] = fc
- return fc, nil
-}
-
-// readFileContent returns content from the given URL
-func (d Data) readFileContent(ctx context.Context, u *url.URL, hdr http.Header) (*fileContent, error) {
- fsys, err := datafs.FSysForPath(ctx, u.String())
- if err != nil {
- return nil, fmt.Errorf("fsys for path %v: %w", u, err)
- }
-
- u, fname := datafs.SplitFSMuxURL(u)
-
- // need to support absolute paths on local filesystem too
- // TODO: this is a hack, probably fix this?
- if u.Scheme == "file" && runtime.GOOS != "windows" {
- fname = u.Path + fname
- }
-
- fsys = fsimpl.WithContextFS(ctx, fsys)
- fsys = fsimpl.WithHeaderFS(hdr, fsys)
-
- // convert d.Sources to a map[string]config.DataSources
- // TODO: remove this when d.Sources is removed
- ds := make(map[string]config.DataSource)
- for k, v := range d.Sources {
- ds[k] = config.DataSource{
- URL: v.URL,
- Header: v.Header,
- }
- }
-
- fsys = datafs.WithDataSourcesFS(ds, fsys)
-
- f, err := fsys.Open(fname)
- if err != nil {
- return nil, fmt.Errorf("open (url: %q, name: %q): %w", u, fname, err)
- }
- defer f.Close()
-
- fi, err := f.Stat()
- if err != nil {
- return nil, fmt.Errorf("stat (url: %q, name: %q): %w", u, fname, err)
- }
-
- // possible type hint in the type query param. Contrary to spec, we allow
- // unescaped '+' characters to make it simpler to provide types like
- // "application/array+json"
- mimeType := u.Query().Get("type")
- mimeType = strings.ReplaceAll(mimeType, " ", "+")
-
- if mimeType == "" {
- mimeType = fsimpl.ContentType(fi)
- }
-
- var data []byte
-
- if fi.IsDir() {
- var dirents []fs.DirEntry
- dirents, err = fs.ReadDir(fsys, fname)
- if err != nil {
- return nil, fmt.Errorf("readDir (url: %q, name: %s): %w", u, fname, err)
- }
-
- entries := make([]string, len(dirents))
- for i, e := range dirents {
- entries[i] = e.Name()
- }
- data, err = json.Marshal(entries)
- if err != nil {
- return nil, fmt.Errorf("json.Marshal: %w", err)
- }
-
- mimeType = jsonArrayMimetype
- } else {
- data, err = io.ReadAll(f)
- if err != nil {
- return nil, fmt.Errorf("read (url: %q, name: %s): %w", u, fname, err)
- }
- }
-
- if mimeType == "" {
- // default to text/plain
- mimeType = textMimetype
- }
-
- return &fileContent{contentType: mimeType, b: data}, nil
-}
-
-// Show all datasources -
-func (d *Data) ListDatasources() []string {
- datasources := make([]string, 0, len(d.Sources))
- for source := range d.Sources {
- datasources = append(datasources, source)
- }
- sort.Strings(datasources)
- return datasources
-}
-
-// resolveURL parses the relative URL rel against base, and returns the
-// resolved URL. Differs from url.ResolveReference in that query parameters are
-// added. In case of duplicates, params from rel are used.
-func resolveURL(base *url.URL, rel string) (*url.URL, error) {
- // if there's an opaque part, there's no resolving to do - just return the
- // base URL
- if base.Opaque != "" {
- return base, nil
- }
-
- // git URLs are special - they have double-slashes that separate a repo
- // from a path in the repo. A missing double-slash means the path is the
- // root.
- switch base.Scheme {
- case "git", "git+file", "git+http", "git+https", "git+ssh":
- if strings.Contains(base.Path, "//") && strings.Contains(rel, "//") {
- return nil, fmt.Errorf("both base URL and subpath contain '//', which is not allowed in git URLs")
- }
-
- // If there's a subpath, the base path must end with '/'. This behaviour
- // is unique to git URLs - other schemes would instead drop the last
- // path element and replace with the subpath.
- if rel != "" && !strings.HasSuffix(base.Path, "/") {
- base.Path += "/"
- }
-
- // If subpath starts with '//', make it relative by prefixing a '.',
- // otherwise it'll be treated as a schemeless URI and the first part
- // will be interpreted as a hostname.
- if strings.HasPrefix(rel, "//") {
- rel = "." + rel
- }
- }
-
- relURL, err := url.Parse(rel)
- if err != nil {
- return nil, err
- }
-
- // URL.ResolveReference requires (or assumes, at least) that the base is
- // absolute. We want to support relative URLs too though, so we need to
- // correct for that.
- out := base.ResolveReference(relURL)
- if out.Scheme == "" && out.Path[0] == '/' {
- out.Path = out.Path[1:]
- }
-
- if base.RawQuery != "" {
- bq := base.Query()
- rq := relURL.Query()
- for k := range rq {
- bq.Set(k, rq.Get(k))
- }
- out.RawQuery = bq.Encode()
- }
-
- return out, nil
-}
diff --git a/data/datasource_test.go b/data/datasource_test.go
deleted file mode 100644
index 8343f4ec..00000000
--- a/data/datasource_test.go
+++ /dev/null
@@ -1,397 +0,0 @@
-package data
-
-import (
- "context"
- "net/http"
- "net/http/httptest"
- "net/url"
- "os"
- "runtime"
- "testing"
- "testing/fstest"
-
- "github.com/hairyhenderson/go-fsimpl"
- "github.com/hairyhenderson/go-fsimpl/httpfs"
- "github.com/hairyhenderson/gomplate/v4/internal/config"
- "github.com/hairyhenderson/gomplate/v4/internal/datafs"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-const osWindows = "windows"
-
-func mustParseURL(in string) *url.URL {
- u, _ := url.Parse(in)
- return u
-}
-
-func TestNewData(t *testing.T) {
- d, err := NewData(nil, nil)
- require.NoError(t, err)
- assert.Empty(t, d.Sources)
-
- d, err = NewData([]string{"foo=http:///foo.json"}, nil)
- require.NoError(t, err)
- assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
-
- d, err = NewData([]string{"foo=http:///foo.json"}, []string{})
- require.NoError(t, err)
- assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
- assert.Empty(t, d.Sources["foo"].Header)
-
- d, err = NewData([]string{"foo=http:///foo.json"}, []string{"bar=Accept: blah"})
- require.NoError(t, err)
- assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
- assert.Empty(t, d.Sources["foo"].Header)
-
- d, err = NewData([]string{"foo=http:///foo.json"}, []string{"foo=Accept: blah"})
- require.NoError(t, err)
- assert.Equal(t, "/foo.json", d.Sources["foo"].URL.Path)
- assert.Equal(t, "blah", d.Sources["foo"].Header["Accept"][0])
-}
-
-func TestDatasource(t *testing.T) {
- setup := func(ext string, contents []byte) *Data {
- fname := "foo." + ext
- var uPath string
- if runtime.GOOS == osWindows {
- uPath = "C:/tmp/" + fname
- } else {
- uPath = "/tmp/" + fname
- }
-
- fsys := datafs.WrapWdFS(fstest.MapFS{
- "tmp/" + fname: &fstest.MapFile{Data: contents},
- })
- ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
-
- sources := map[string]config.DataSource{
- "foo": {
- URL: &url.URL{Scheme: "file", Path: uPath},
- },
- }
- return &Data{Sources: sources, Ctx: ctx}
- }
-
- test := func(ext, mime string, contents []byte, expected interface{}) {
- data := setup(ext, contents)
-
- actual, err := data.Datasource("foo", "?type="+mime)
- require.NoError(t, err)
- assert.Equal(t, expected, actual)
- }
-
- testObj := func(ext, mime string, contents []byte) {
- test(ext, mime, contents,
- map[string]interface{}{
- "hello": map[string]interface{}{"cruel": "world"},
- })
- }
-
- testObj("json", jsonMimetype, []byte(`{"hello":{"cruel":"world"}}`))
- testObj("yml", yamlMimetype, []byte("hello:\n cruel: world\n"))
- test("json", jsonMimetype, []byte(`[1, "two", true]`),
- []interface{}{1, "two", true})
- test("yaml", yamlMimetype, []byte("---\n- 1\n- two\n- true\n"),
- []interface{}{1, "two", true})
-
- d := setup("", nil)
- actual, err := d.Datasource("foo")
- require.NoError(t, err)
- assert.Equal(t, "", actual)
-
- _, err = d.Datasource("bar")
- require.Error(t, err)
-}
-
-func TestDatasourceReachable(t *testing.T) {
- fname := "foo.json"
- var uPath string
- if runtime.GOOS == osWindows {
- uPath = "C:/tmp/" + fname
- } else {
- uPath = "/tmp/" + fname
- }
-
- fsys := datafs.WrapWdFS(fstest.MapFS{
- "tmp/" + fname: &fstest.MapFile{Data: []byte("{}")},
- })
- ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
-
- sources := map[string]config.DataSource{
- "foo": {
- URL: &url.URL{Scheme: "file", Path: uPath},
- },
- "bar": {
- URL: &url.URL{Scheme: "file", Path: "/bogus"},
- },
- }
- data := &Data{Sources: sources, Ctx: ctx}
-
- assert.True(t, data.DatasourceReachable("foo"))
- assert.False(t, data.DatasourceReachable("bar"))
-}
-
-func TestDatasourceExists(t *testing.T) {
- sources := map[string]config.DataSource{
- "foo": {},
- }
- data := &Data{Sources: sources}
- assert.True(t, data.DatasourceExists("foo"))
- assert.False(t, data.DatasourceExists("bar"))
-}
-
-func TestInclude(t *testing.T) {
- ext := "txt"
- contents := "hello world"
- fname := "foo." + ext
-
- var uPath string
- if runtime.GOOS == osWindows {
- uPath = "C:/tmp/" + fname
- } else {
- uPath = "/tmp/" + fname
- }
-
- fsys := datafs.WrapWdFS(fstest.MapFS{
- "tmp/" + fname: &fstest.MapFile{Data: []byte(contents)},
- })
- ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
-
- sources := map[string]config.DataSource{
- "foo": {
- URL: &url.URL{Scheme: "file", Path: uPath},
- },
- }
- data := &Data{Sources: sources, Ctx: ctx}
- actual, err := data.Include("foo")
- require.NoError(t, err)
- assert.Equal(t, contents, actual)
-}
-
-func TestDefineDatasource(t *testing.T) {
- d := &Data{}
- _, err := d.DefineDatasource("", "foo.json")
- require.Error(t, err)
-
- d = &Data{}
- _, err = d.DefineDatasource("", "../foo.json")
- require.Error(t, err)
-
- d = &Data{}
- _, err = d.DefineDatasource("", "ftp://example.com/foo.yml")
- require.Error(t, err)
-
- d = &Data{}
- _, err = d.DefineDatasource("data", "foo.json")
- s := d.Sources["data"]
- require.NoError(t, err)
- assert.EqualValues(t, &url.URL{Path: "foo.json"}, s.URL)
-
- d = &Data{}
- _, err = d.DefineDatasource("data", "/otherdir/foo.json")
- s = d.Sources["data"]
- require.NoError(t, err)
- assert.Equal(t, "file", s.URL.Scheme)
- assert.True(t, s.URL.IsAbs())
- assert.Equal(t, "/otherdir/foo.json", s.URL.Path)
-
- d = &Data{}
- _, err = d.DefineDatasource("data", "sftp://example.com/blahblah/foo.json")
- s = d.Sources["data"]
- require.NoError(t, err)
- assert.Equal(t, "sftp", s.URL.Scheme)
- assert.True(t, s.URL.IsAbs())
- assert.Equal(t, "/blahblah/foo.json", s.URL.Path)
-
- d = &Data{
- Sources: map[string]config.DataSource{
- "data": {},
- },
- }
- _, err = d.DefineDatasource("data", "/otherdir/foo.json")
- s = d.Sources["data"]
- require.NoError(t, err)
- assert.Nil(t, s.URL)
-
- d = &Data{}
- _, err = d.DefineDatasource("data", "/otherdir/foo?type=application/x-env")
- require.NoError(t, err)
- s = d.Sources["data"]
- require.NotNil(t, s)
- assert.Equal(t, "/otherdir/foo", s.URL.Path)
-}
-
-func TestFromConfig(t *testing.T) {
- ctx := context.Background()
-
- cfg := &config.Config{}
- actual := FromConfig(ctx, cfg)
- expected := &Data{
- Ctx: actual.Ctx,
- Sources: map[string]config.DataSource{},
- }
- assert.EqualValues(t, expected, actual)
-
- cfg = &config.Config{
- DataSources: map[string]config.DataSource{
- "foo": {
- URL: mustParseURL("http://example.com"),
- },
- },
- }
- actual = FromConfig(ctx, cfg)
- expected = &Data{
- Ctx: actual.Ctx,
- Sources: map[string]config.DataSource{
- "foo": {
- URL: mustParseURL("http://example.com"),
- },
- },
- }
- assert.EqualValues(t, expected, actual)
-
- cfg = &config.Config{
- DataSources: map[string]config.DataSource{
- "foo": {
- URL: mustParseURL("http://foo.com"),
- },
- },
- Context: map[string]config.DataSource{
- "bar": {
- URL: mustParseURL("http://bar.com"),
- Header: http.Header{
- "Foo": []string{"bar"},
- },
- },
- },
- ExtraHeaders: map[string]http.Header{
- "baz": {
- "Foo": []string{"bar"},
- },
- },
- }
- actual = FromConfig(ctx, cfg)
- expected = &Data{
- Ctx: actual.Ctx,
- Sources: map[string]config.DataSource{
- "foo": {
- URL: mustParseURL("http://foo.com"),
- },
- "bar": {
- URL: mustParseURL("http://bar.com"),
- Header: http.Header{
- "Foo": []string{"bar"},
- },
- },
- },
- ExtraHeaders: map[string]http.Header{
- "baz": {
- "Foo": []string{"bar"},
- },
- },
- }
- assert.EqualValues(t, expected, actual)
-}
-
-func TestListDatasources(t *testing.T) {
- sources := map[string]config.DataSource{
- "foo": {},
- "bar": {},
- }
- data := &Data{Sources: sources}
-
- assert.Equal(t, []string{"bar", "foo"}, data.ListDatasources())
-}
-
-func TestResolveURL(t *testing.T) {
- out, err := resolveURL(mustParseURL("http://example.com/foo.json"), "bar.json")
- require.NoError(t, err)
- assert.Equal(t, "http://example.com/bar.json", out.String())
-
- out, err = resolveURL(mustParseURL("http://example.com/a/b/?n=2"), "bar.json?q=1")
- require.NoError(t, err)
- assert.Equal(t, "http://example.com/a/b/bar.json?n=2&q=1", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo"), "//myfile?type=application/json")
- require.NoError(t, err)
- assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/foo/bar/"), "//myfile?type=application/json")
- require.NoError(t, err)
- assert.Equal(t, "git+file:///tmp/foo/bar//myfile?type=application/json", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo/"), ".//myfile?type=application/json")
- require.NoError(t, err)
- assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/repo//foo.txt"), "")
- require.NoError(t, err)
- assert.Equal(t, "git+file:///tmp/repo//foo.txt", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo"), ".//myfile?type=application/json")
- require.NoError(t, err)
- assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
-
- out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo//foo/?type=application/json"), "bar/myfile")
- require.NoError(t, err)
- // note that the '/' in the query string is encoded to %2F - that's OK
- assert.Equal(t, "git+file:///tmp/myrepo//foo/bar/myfile?type=application%2Fjson", out.String())
-
- // both base and relative may not contain "//"
- _, err = resolveURL(mustParseURL("git+ssh://git@example.com/foo//bar"), ".//myfile")
- require.Error(t, err)
-
- _, err = resolveURL(mustParseURL("git+ssh://git@example.com/foo//bar"), "baz//myfile")
- require.Error(t, err)
-
- // relative urls must remain relative
- out, err = resolveURL(mustParseURL("tmp/foo.json"), "")
- require.NoError(t, err)
- assert.Equal(t, "tmp/foo.json", out.String())
-}
-
-func TestReadFileContent(t *testing.T) {
- wd, _ := os.Getwd()
- t.Cleanup(func() {
- _ = os.Chdir(wd)
- })
- _ = os.Chdir("/")
-
- mux := http.NewServeMux()
- mux.HandleFunc("/foo.json", func(w http.ResponseWriter, _ *http.Request) {
- w.Header().Set("Content-Type", jsonMimetype)
- w.Write([]byte(`{"foo": "bar"}`))
- })
-
- srv := httptest.NewServer(mux)
- t.Cleanup(srv.Close)
-
- fsys := datafs.WrapWdFS(fstest.MapFS{
- "foo.json": &fstest.MapFile{Data: []byte(`{"foo": "bar"}`)},
- "dir/1.yaml": &fstest.MapFile{Data: []byte(`foo: bar`)},
- "dir/2.yaml": &fstest.MapFile{Data: []byte(`baz: qux`)},
- "dir/sub/sub1.yaml": &fstest.MapFile{Data: []byte(`quux: corge`)},
- })
-
- fsp := fsimpl.NewMux()
- fsp.Add(httpfs.FS)
- fsp.Add(datafs.WrappedFSProvider(fsys, "file", ""))
-
- ctx := datafs.ContextWithFSProvider(context.Background(), fsp)
-
- d := Data{}
-
- fc, err := d.readFileContent(ctx, mustParseURL("file:///foo.json"), nil)
- require.NoError(t, err)
- assert.Equal(t, []byte(`{"foo": "bar"}`), fc.b)
-
- fc, err = d.readFileContent(ctx, mustParseURL("dir/"), nil)
- require.NoError(t, err)
- assert.JSONEq(t, `["1.yaml", "2.yaml", "sub"]`, string(fc.b))
-
- fc, err = d.readFileContent(ctx, mustParseURL(srv.URL+"/foo.json"), nil)
- require.NoError(t, err)
- assert.Equal(t, []byte(`{"foo": "bar"}`), fc.b)
-}
diff --git a/data/mimetypes.go b/data/mimetypes.go
deleted file mode 100644
index 24ea87de..00000000
--- a/data/mimetypes.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package data
-
-import (
- "mime"
-)
-
-const (
- textMimetype = "text/plain"
- csvMimetype = "text/csv"
- jsonMimetype = "application/json"
- jsonArrayMimetype = "application/array+json"
- tomlMimetype = "application/toml"
- yamlMimetype = "application/yaml"
- envMimetype = "application/x-env"
- cueMimetype = "application/cue"
-)
-
-// mimeTypeAliases defines a mapping for non-canonical mime types that are
-// sometimes seen in the wild
-var mimeTypeAliases = map[string]string{
- "application/x-yaml": yamlMimetype,
- "application/text": textMimetype,
-}
-
-func mimeAlias(m string) string {
- // normalize the type by removing any extra parameters
- m, _, _ = mime.ParseMediaType(m)
-
- if a, ok := mimeTypeAliases[m]; ok {
- return a
- }
- return m
-}
diff --git a/funcs.go b/funcs.go
index d375f5b6..93025f59 100644
--- a/funcs.go
+++ b/funcs.go
@@ -4,17 +4,14 @@ import (
"context"
"text/template"
- "github.com/hairyhenderson/gomplate/v4/data"
"github.com/hairyhenderson/gomplate/v4/internal/config"
"github.com/hairyhenderson/gomplate/v4/internal/funcs"
)
// CreateFuncs - function mappings are created here
-//
-//nolint:staticcheck
-func CreateFuncs(ctx context.Context, d *data.Data) template.FuncMap {
+func CreateFuncs(ctx context.Context) template.FuncMap {
f := template.FuncMap{}
- addToMap(f, funcs.CreateDataFuncs(ctx, d))
+ addToMap(f, funcs.CreateDataFuncs(ctx))
addToMap(f, funcs.CreateAWSFuncs(ctx))
addToMap(f, funcs.CreateGCPFuncs(ctx))
addToMap(f, funcs.CreateBase64Funcs(ctx))
diff --git a/gomplate.go b/gomplate.go
index a2567b53..1e27f253 100644
--- a/gomplate.go
+++ b/gomplate.go
@@ -86,7 +86,7 @@ func simpleNamer(outDir string) func(ctx context.Context, inPath string) (string
func mappingNamer(outMap string, tr *Renderer) func(context.Context, string) (string, error) {
return func(ctx context.Context, inPath string) (string, error) {
- tcontext, err := createTmplContext(ctx, tr.tctxAliases, tr.data)
+ tcontext, err := createTmplContext(ctx, tr.tctxAliases, tr.sr)
if err != nil {
return "", err
}
diff --git a/gomplate_test.go b/gomplate_test.go
index 62ae4525..e9b885cf 100644
--- a/gomplate_test.go
+++ b/gomplate_test.go
@@ -10,8 +10,8 @@ import (
"github.com/hairyhenderson/gomplate/v4/aws"
"github.com/hairyhenderson/gomplate/v4/conv"
- "github.com/hairyhenderson/gomplate/v4/data"
"github.com/hairyhenderson/gomplate/v4/env"
+ "github.com/hairyhenderson/gomplate/v4/internal/datafs"
"github.com/hairyhenderson/gomplate/v4/internal/parsers"
"github.com/stretchr/testify/assert"
@@ -179,9 +179,9 @@ func TestSimpleNamer(t *testing.T) {
func TestMappingNamer(t *testing.T) {
ctx := context.Background()
+ reg := datafs.NewRegistry()
tr := &Renderer{
- //nolint:staticcheck
- data: &data.Data{},
+ sr: datafs.NewSourceReader(reg),
funcs: map[string]interface{}{
"foo": func() string { return "foo" },
},
diff --git a/internal/datafs/context.go b/internal/datafs/context.go
index 7f1235bf..30722f2b 100644
--- a/internal/datafs/context.go
+++ b/internal/datafs/context.go
@@ -5,8 +5,6 @@ import (
"io"
"io/fs"
"os"
-
- "github.com/hairyhenderson/gomplate/v4/internal/config"
)
// withContexter is an fs.FS that can be configured with a custom context
@@ -15,16 +13,16 @@ type withContexter interface {
WithContext(ctx context.Context) fs.FS
}
-type withDataSourceser interface {
- WithDataSources(sources map[string]config.DataSource) fs.FS
+type withDataSourceRegistryer interface {
+ WithDataSourceRegistry(registry Registry) fs.FS
}
-// WithDataSourcesFS injects a datasource map into the filesystem fs, if the
-// filesystem supports it (i.e. has a WithDataSources method). This is used for
+// WithDataSourceRegistryFS injects a datasource registry into the filesystem fs, if the
+// filesystem supports it (i.e. has a WithDataSourceRegistry method). This is used for
// the mergefs filesystem.
-func WithDataSourcesFS(sources map[string]config.DataSource, fsys fs.FS) fs.FS {
- if fsys, ok := fsys.(withDataSourceser); ok {
- return fsys.WithDataSources(sources)
+func WithDataSourceRegistryFS(registry Registry, fsys fs.FS) fs.FS {
+ if fsys, ok := fsys.(withDataSourceRegistryer); ok {
+ return fsys.WithDataSourceRegistry(registry)
}
return fsys
diff --git a/internal/datafs/mergefs.go b/internal/datafs/mergefs.go
index 7c612382..38302cf2 100644
--- a/internal/datafs/mergefs.go
+++ b/internal/datafs/mergefs.go
@@ -26,8 +26,8 @@ import (
// paths. Only a URL like "merge:" or "merge:///" makes sense here - the
// piped-separated lists of sub-sources to merge must be given to Open.
//
-// Usually you'll want to use WithDataSourcesFS to provide the map of
-// datasources that can be referenced. Otherwise, only URLs will be supported.
+// You can use WithDataSourceRegistryFS to provide the datasource registry;
+// otherwise an empty registry will be used.
//
// An FSProvider will also be needed, which can be provided with a context
// using ContextWithFSProvider. Provide that context with fsimpl.WithContextFS.
@@ -37,24 +37,24 @@ func NewMergeFS(u *url.URL) (fs.FS, error) {
}
return &mergeFS{
- ctx: context.Background(),
- sources: map[string]config.DataSource{},
+ ctx: context.Background(),
+ registry: NewRegistry(),
}, nil
}
type mergeFS struct {
ctx context.Context
httpClient *http.Client
- sources map[string]config.DataSource
+ registry Registry
}
//nolint:gochecknoglobals
var MergeFS = fsimpl.FSProviderFunc(NewMergeFS, "merge")
var (
- _ fs.FS = (*mergeFS)(nil)
- _ withContexter = (*mergeFS)(nil)
- _ withDataSourceser = (*mergeFS)(nil)
+ _ fs.FS = (*mergeFS)(nil)
+ _ withContexter = (*mergeFS)(nil)
+ _ withDataSourceRegistryer = (*mergeFS)(nil)
)
func (f *mergeFS) WithContext(ctx context.Context) fs.FS {
@@ -79,13 +79,13 @@ func (f *mergeFS) WithHTTPClient(client *http.Client) fs.FS {
return &fsys
}
-func (f *mergeFS) WithDataSources(sources map[string]config.DataSource) fs.FS {
- if sources == nil {
+func (f *mergeFS) WithDataSourceRegistry(registry Registry) fs.FS {
+ if registry == nil {
return f
}
fsys := *f
- fsys.sources = sources
+ fsys.registry = registry
return &fsys
}
@@ -106,7 +106,7 @@ func (f *mergeFS) Open(name string) (fs.File, error) {
for i, part := range parts {
// if this is a datasource, look it up
- subSource, ok := f.sources[part]
+ subSource, ok := f.registry.Lookup(part)
if !ok {
// maybe it's a relative filename?
u, uerr := urlhelpers.ParseSourceURL(part)
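For orientation, here is a minimal sketch of how a registry gets injected into the merge filesystem, using only APIs that appear in this diff. The aliases and URLs are illustrative, the context must already carry an FSProvider (the test below sets one up with ContextWithFSProvider), and the internal packages aren't importable outside this module, so treat it purely as a sketch.

    // a minimal sketch, assuming these internal packages were importable
    package examples

    import (
    	"context"
    	"io/fs"
    	"net/url"

    	"github.com/hairyhenderson/go-fsimpl"
    	"github.com/hairyhenderson/gomplate/v4/internal/config"
    	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
    )

    // openMerged merges two registered datasources; ctx must already carry an
    // FSProvider so the sub-filesystems can be resolved.
    func openMerged(ctx context.Context, reg datafs.Registry) (fs.File, error) {
    	reg.Register("defaults", config.DataSource{URL: &url.URL{Scheme: "file", Path: "/tmp/defaults.yaml"}})
    	reg.Register("overrides", config.DataSource{URL: &url.URL{Scheme: "file", Path: "/tmp/overrides.yaml"}})

    	fsys, err := datafs.NewMergeFS(&url.URL{Scheme: "merge"})
    	if err != nil {
    		return nil, err
    	}

    	// inject the registry so Open can resolve aliases, and the context so
    	// sub-filesystems can be found via its FSProvider
    	fsys = datafs.WithDataSourceRegistryFS(reg, fsys)
    	fsys = fsimpl.WithContextFS(ctx, fsys)

    	// sub-sources are pipe-separated; each part is a registered alias or a URL
    	return fsys.Open("overrides|defaults")
    }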
diff --git a/internal/datafs/mergefs_test.go b/internal/datafs/mergefs_test.go
index 1333ca48..17ffa716 100644
--- a/internal/datafs/mergefs_test.go
+++ b/internal/datafs/mergefs_test.go
@@ -61,21 +61,19 @@ func setupMergeFsys(ctx context.Context, t *testing.T) fs.FS {
path.Join(wd, "tmp/textfile.txt"): {Data: []byte(`plain text...`)},
})
- source := config.DataSource{
+ reg := NewRegistry()
+ reg.Register("foo", config.DataSource{
URL: mustParseURL("merge:file:///tmp/jsonfile.json|file:///tmp/yamlfile.yaml"),
- }
- sources := map[string]config.DataSource{
- "foo": source,
- "bar": {URL: mustParseURL("file:///tmp/jsonfile.json")},
- "baz": {URL: mustParseURL("file:///tmp/yamlfile.yaml")},
- "text": {URL: mustParseURL("file:///tmp/textfile.txt")},
- "badscheme": {URL: mustParseURL("bad:///scheme.json")},
- // mime type overridden by URL query, should fail to parse
- "badtype": {URL: mustParseURL("file:///tmp/jsonfile.json?type=foo/bar")},
- "array": {
- URL: mustParseURL("file:///tmp/array.json?type=" + url.QueryEscape(iohelpers.JSONArrayMimetype)),
- },
- }
+ })
+ reg.Register("bar", config.DataSource{URL: mustParseURL("file:///tmp/jsonfile.json")})
+ reg.Register("baz", config.DataSource{URL: mustParseURL("file:///tmp/yamlfile.yaml")})
+ reg.Register("text", config.DataSource{URL: mustParseURL("file:///tmp/textfile.txt")})
+ reg.Register("badscheme", config.DataSource{URL: mustParseURL("bad:///scheme.json")})
+ // mime type overridden by URL query, should fail to parse
+ reg.Register("badtype", config.DataSource{URL: mustParseURL("file:///tmp/textfile.txt?type=foo/bar")})
+ reg.Register("array", config.DataSource{
+ URL: mustParseURL("file:///tmp/array.json?type=" + url.QueryEscape(iohelpers.JSONArrayMimetype)),
+ })
mux := fsimpl.NewMux()
mux.Add(MergeFS)
@@ -86,7 +84,7 @@ func setupMergeFsys(ctx context.Context, t *testing.T) fs.FS {
fsys, err := NewMergeFS(mustParseURL("merge:///"))
require.NoError(t, err)
- fsys = WithDataSourcesFS(sources, fsys)
+ fsys = WithDataSourceRegistryFS(reg, fsys)
fsys = fsimpl.WithContextFS(ctx, fsys)
return fsys
diff --git a/internal/datafs/reader.go b/internal/datafs/reader.go
new file mode 100644
index 00000000..f1af07cf
--- /dev/null
+++ b/internal/datafs/reader.go
@@ -0,0 +1,226 @@
+package datafs
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/fs"
+ "net/http"
+ "net/url"
+ "runtime"
+ "strings"
+
+ "github.com/hairyhenderson/go-fsimpl"
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+ "github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
+)
+
+// DataSourceReader reads content from a datasource
+type DataSourceReader interface {
+ // ReadSource reads the content of a datasource, given an alias and optional
+ // arguments. If the datasource is not found, the alias is interpreted as a
+ // URL. If the alias is not a valid URL, an error is returned.
+ //
+ // Returned content is cached, so subsequent calls with the same alias and
+ // arguments will return the same content.
+ ReadSource(ctx context.Context, alias string, args ...string) (string, []byte, error)
+
+ // contains registry
+ Registry
+}
+
+type dsReader struct {
+ cache map[string]*content
+
+ Registry
+}
+
+// content holds raw bytes and their content type - used mainly for caching
+type content struct {
+ contentType string
+ b []byte
+}
+
+func NewSourceReader(reg Registry) DataSourceReader {
+ return &dsReader{Registry: reg}
+}
+
+func (d *dsReader) ReadSource(ctx context.Context, alias string, args ...string) (string, []byte, error) {
+ source, ok := d.Lookup(alias)
+ if !ok {
+ srcURL, err := url.Parse(alias)
+ if err != nil || !srcURL.IsAbs() {
+ return "", nil, fmt.Errorf("undefined datasource '%s': %w", alias, err)
+ }
+
+ d.Register(alias, config.DataSource{URL: srcURL})
+
+ // repeat the lookup now that it's registered - we shouldn't just use
+ // it directly because registration may include extra headers
+ source, _ = d.Lookup(alias)
+ }
+
+ if d.cache == nil {
+ d.cache = make(map[string]*content)
+ }
+ cacheKey := alias
+ for _, v := range args {
+ cacheKey += v
+ }
+ cached, ok := d.cache[cacheKey]
+ if ok {
+ return cached.contentType, cached.b, nil
+ }
+
+ arg := ""
+ if len(args) > 0 {
+ arg = args[0]
+ }
+ u, err := resolveURL(source.URL, arg)
+ if err != nil {
+ return "", nil, err
+ }
+
+ fc, err := d.readFileContent(ctx, u, source.Header)
+ if err != nil {
+ return "", nil, fmt.Errorf("couldn't read datasource '%s' (%s): %w", alias, u, err)
+ }
+ d.cache[cacheKey] = fc
+
+ return fc.contentType, fc.b, nil
+}
+
+func (d *dsReader) readFileContent(ctx context.Context, u *url.URL, hdr http.Header) (*content, error) {
+ fsys, err := FSysForPath(ctx, u.String())
+ if err != nil {
+ return nil, fmt.Errorf("fsys for path %v: %w", u, err)
+ }
+
+ u, fname := SplitFSMuxURL(u)
+
+ // need to support absolute paths on local filesystem too
+ // TODO: this is a hack, probably fix this?
+ if u.Scheme == "file" && runtime.GOOS != "windows" {
+ fname = u.Path + fname
+ }
+
+ fsys = fsimpl.WithContextFS(ctx, fsys)
+ fsys = fsimpl.WithHeaderFS(hdr, fsys)
+ fsys = WithDataSourceRegistryFS(d.Registry, fsys)
+
+ f, err := fsys.Open(fname)
+ if err != nil {
+ return nil, fmt.Errorf("open (url: %q, name: %q): %w", u, fname, err)
+ }
+ defer f.Close()
+
+ fi, err := f.Stat()
+ if err != nil {
+ return nil, fmt.Errorf("stat (url: %q, name: %q): %w", u, fname, err)
+ }
+
+ // possible type hint in the type query param. Contrary to spec, we allow
+ // unescaped '+' characters to make it simpler to provide types like
+ // "application/array+json"
+ mimeType := u.Query().Get("type")
+ mimeType = strings.ReplaceAll(mimeType, " ", "+")
+
+ if mimeType == "" {
+ mimeType = fsimpl.ContentType(fi)
+ }
+
+ var data []byte
+
+ if fi.IsDir() {
+ var dirents []fs.DirEntry
+ dirents, err = fs.ReadDir(fsys, fname)
+ if err != nil {
+ return nil, fmt.Errorf("readDir (url: %q, name: %s): %w", u, fname, err)
+ }
+
+ entries := make([]string, len(dirents))
+ for i, e := range dirents {
+ entries[i] = e.Name()
+ }
+ data, err = json.Marshal(entries)
+ if err != nil {
+ return nil, fmt.Errorf("json.Marshal: %w", err)
+ }
+
+ mimeType = iohelpers.JSONArrayMimetype
+ } else {
+ data, err = io.ReadAll(f)
+ if err != nil {
+ return nil, fmt.Errorf("read (url: %q, name: %s): %w", u, fname, err)
+ }
+ }
+
+ if mimeType == "" {
+ // default to text/plain
+ mimeType = iohelpers.TextMimetype
+ }
+
+ return &content{contentType: mimeType, b: data}, nil
+}
+
+// COPIED FROM /data/datasource.go
+//
+// resolveURL parses the relative URL rel against base, and returns the
+// resolved URL. Differs from url.ResolveReference in that query parameters are
+// added. In case of duplicates, params from rel are used.
+func resolveURL(base *url.URL, rel string) (*url.URL, error) {
+ // if there's an opaque part, there's no resolving to do - just return the
+ // base URL
+ if base.Opaque != "" {
+ return base, nil
+ }
+
+ // git URLs are special - they have double-slashes that separate a repo
+ // from a path in the repo. A missing double-slash means the path is the
+ // root.
+ switch base.Scheme {
+ case "git", "git+file", "git+http", "git+https", "git+ssh":
+ if strings.Contains(base.Path, "//") && strings.Contains(rel, "//") {
+ return nil, fmt.Errorf("both base URL and subpath contain '//', which is not allowed in git URLs")
+ }
+
+ // If there's a subpath, the base path must end with '/'. This behaviour
+ // is unique to git URLs - other schemes would instead drop the last
+ // path element and replace with the subpath.
+ if rel != "" && !strings.HasSuffix(base.Path, "/") {
+ base.Path += "/"
+ }
+
+ // If subpath starts with '//', make it relative by prefixing a '.',
+ // otherwise it'll be treated as a schemeless URI and the first part
+ // will be interpreted as a hostname.
+ if strings.HasPrefix(rel, "//") {
+ rel = "." + rel
+ }
+ }
+
+ relURL, err := url.Parse(rel)
+ if err != nil {
+ return nil, err
+ }
+
+ // URL.ResolveReference requires (or assumes, at least) that the base is
+ // absolute. We want to support relative URLs too though, so we need to
+ // correct for that.
+ out := base.ResolveReference(relURL)
+ if out.Scheme == "" && out.Path[0] == '/' {
+ out.Path = out.Path[1:]
+ }
+
+ if base.RawQuery != "" {
+ bq := base.Query()
+ rq := relURL.Query()
+ for k := range rq {
+ bq.Set(k, rq.Get(k))
+ }
+ out.RawQuery = bq.Encode()
+ }
+
+ return out, nil
+}
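Putting the reader together, a minimal sketch of the read-then-parse flow that replaces the old data.Datasource call, using only APIs introduced in this diff. The alias and file URL are illustrative, and the context must carry an FSProvider for the file scheme (see ContextWithFSProvider in the tests below).

    // a minimal sketch, assuming these internal packages were importable
    package examples

    import (
    	"context"
    	"net/url"

    	"github.com/hairyhenderson/gomplate/v4/internal/config"
    	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
    	"github.com/hairyhenderson/gomplate/v4/internal/parsers"
    )

    func readAndParse(ctx context.Context) (interface{}, error) {
    	// the registry replaces the old Data.Sources map
    	reg := datafs.NewRegistry()
    	reg.Register("cfg", config.DataSource{
    		URL: &url.URL{Scheme: "file", Path: "/tmp/config.yaml"},
    	})

    	// the reader caches per alias+args, like the old Data cache did
    	sr := datafs.NewSourceReader(reg)

    	// ReadSource returns the content type and raw bytes...
    	ct, b, err := sr.ReadSource(ctx, "cfg")
    	if err != nil {
    		return nil, err
    	}

    	// ...and parsing is a separate step, handled by internal/parsers
    	return parsers.ParseData(ct, string(b))
    }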
diff --git a/internal/datafs/reader_test.go b/internal/datafs/reader_test.go
new file mode 100644
index 00000000..2ec7485c
--- /dev/null
+++ b/internal/datafs/reader_test.go
@@ -0,0 +1,162 @@
+package datafs
+
+import (
+ "context"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "os"
+ "runtime"
+ "testing"
+ "testing/fstest"
+
+ "github.com/hairyhenderson/go-fsimpl"
+ "github.com/hairyhenderson/go-fsimpl/httpfs"
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+ "github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+const osWindows = "windows"
+
+func TestResolveURL(t *testing.T) {
+ out, err := resolveURL(mustParseURL("http://example.com/foo.json"), "bar.json")
+ require.NoError(t, err)
+ assert.Equal(t, "http://example.com/bar.json", out.String())
+
+ out, err = resolveURL(mustParseURL("http://example.com/a/b/?n=2"), "bar.json?q=1")
+ require.NoError(t, err)
+ assert.Equal(t, "http://example.com/a/b/bar.json?n=2&q=1", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo"), "//myfile?type=application/json")
+ require.NoError(t, err)
+ assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/foo/bar/"), "//myfile?type=application/json")
+ require.NoError(t, err)
+ assert.Equal(t, "git+file:///tmp/foo/bar//myfile?type=application/json", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo/"), ".//myfile?type=application/json")
+ require.NoError(t, err)
+ assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/repo//foo.txt"), "")
+ require.NoError(t, err)
+ assert.Equal(t, "git+file:///tmp/repo//foo.txt", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo"), ".//myfile?type=application/json")
+ require.NoError(t, err)
+ assert.Equal(t, "git+file:///tmp/myrepo//myfile?type=application/json", out.String())
+
+ out, err = resolveURL(mustParseURL("git+file:///tmp/myrepo//foo/?type=application/json"), "bar/myfile")
+ require.NoError(t, err)
+ // note that the '/' in the query string is encoded to %2F - that's OK
+ assert.Equal(t, "git+file:///tmp/myrepo//foo/bar/myfile?type=application%2Fjson", out.String())
+
+ // both base and relative may not contain "//"
+ _, err = resolveURL(mustParseURL("git+ssh://git@example.com/foo//bar"), ".//myfile")
+ require.Error(t, err)
+
+ _, err = resolveURL(mustParseURL("git+ssh://git@example.com/foo//bar"), "baz//myfile")
+ require.Error(t, err)
+
+ // relative urls must remain relative
+ out, err = resolveURL(mustParseURL("tmp/foo.json"), "")
+ require.NoError(t, err)
+ assert.Equal(t, "tmp/foo.json", out.String())
+}
+
+func TestReadFileContent(t *testing.T) {
+ wd, _ := os.Getwd()
+ t.Cleanup(func() {
+ _ = os.Chdir(wd)
+ })
+ _ = os.Chdir("/")
+
+ mux := http.NewServeMux()
+ mux.HandleFunc("/foo.json", func(w http.ResponseWriter, _ *http.Request) {
+ w.Header().Set("Content-Type", iohelpers.JSONMimetype)
+ w.Write([]byte(`{"foo": "bar"}`))
+ })
+
+ srv := httptest.NewServer(mux)
+ t.Cleanup(srv.Close)
+
+ fsys := WrapWdFS(fstest.MapFS{
+ "foo.json": &fstest.MapFile{Data: []byte(`{"foo": "bar"}`)},
+ "dir/1.yaml": &fstest.MapFile{Data: []byte(`foo: bar`)},
+ "dir/2.yaml": &fstest.MapFile{Data: []byte(`baz: qux`)},
+ "dir/sub/sub1.yaml": &fstest.MapFile{Data: []byte(`quux: corge`)},
+ })
+
+ fsp := fsimpl.NewMux()
+ fsp.Add(httpfs.FS)
+ fsp.Add(WrappedFSProvider(fsys, "file", ""))
+
+ ctx := ContextWithFSProvider(context.Background(), fsp)
+
+ reg := NewRegistry()
+ sr := &dsReader{Registry: reg}
+
+ fc, err := sr.readFileContent(ctx, mustParseURL("file:///foo.json"), nil)
+ require.NoError(t, err)
+ assert.Equal(t, []byte(`{"foo": "bar"}`), fc.b)
+
+ fc, err = sr.readFileContent(ctx, mustParseURL("dir/"), nil)
+ require.NoError(t, err)
+ assert.JSONEq(t, `["1.yaml", "2.yaml", "sub"]`, string(fc.b))
+
+ fc, err = sr.readFileContent(ctx, mustParseURL(srv.URL+"/foo.json"), nil)
+ require.NoError(t, err)
+ assert.Equal(t, []byte(`{"foo": "bar"}`), fc.b)
+}
+
+func TestDatasource(t *testing.T) {
+ setup := func(ext string, contents []byte) (context.Context, *dsReader) {
+ fname := "foo." + ext
+ var uPath string
+ if runtime.GOOS == osWindows {
+ uPath = "C:/tmp/" + fname
+ } else {
+ uPath = "/tmp/" + fname
+ }
+
+ fsys := WrapWdFS(fstest.MapFS{
+ "tmp/" + fname: &fstest.MapFile{Data: contents},
+ })
+ ctx := ContextWithFSProvider(context.Background(), WrappedFSProvider(fsys, "file", ""))
+
+ reg := NewRegistry()
+ reg.Register("foo", config.DataSource{URL: &url.URL{Scheme: "file", Path: uPath}})
+
+ return ctx, &dsReader{Registry: reg}
+ }
+
+ test := func(ext, mime string, contents []byte) {
+ ctx, data := setup(ext, contents)
+
+ ct, b, err := data.ReadSource(ctx, "foo", "?type="+mime)
+ require.NoError(t, err)
+ assert.Equal(t, contents, b)
+ assert.Equal(t, mime, ct)
+ }
+
+ testObj := func(ext, mime string, contents []byte) {
+ test(ext, mime, contents)
+ }
+
+ testObj("json", iohelpers.JSONMimetype, []byte(`{"hello":{"cruel":"world"}}`))
+ testObj("yml", iohelpers.YAMLMimetype, []byte("hello:\n cruel: world\n"))
+ test("json", iohelpers.JSONMimetype, []byte(`[1, "two", true]`))
+ test("yaml", iohelpers.YAMLMimetype, []byte("---\n- 1\n- two\n- true\n"))
+
+ ctx, d := setup("", nil)
+ ct, b, err := d.ReadSource(ctx, "foo")
+ require.NoError(t, err)
+ assert.Empty(t, b)
+ assert.Equal(t, iohelpers.TextMimetype, ct)
+
+ _, _, err = d.ReadSource(ctx, "bar")
+ require.Error(t, err)
+}
diff --git a/internal/datafs/registry.go b/internal/datafs/registry.go
new file mode 100644
index 00000000..905f5c4b
--- /dev/null
+++ b/internal/datafs/registry.go
@@ -0,0 +1,88 @@
+package datafs
+
+import (
+ "net/http"
+ "sort"
+ "sync"
+
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+)
+
+// Registry - a registry of datasources
+type Registry interface {
+ // Register a datasource
+ Register(alias string, ds config.DataSource)
+ // Lookup a registered datasource
+ Lookup(alias string) (config.DataSource, bool)
+ // List registered datasource aliases
+ List() []string
+
+ // Add extra headers not attached to a pre-defined datasource. These can be
+ // used by datasources registered at runtime.
+ AddExtraHeader(alias string, hdr http.Header)
+}
+
+func NewRegistry() Registry {
+ return &dsRegistry{
+ RWMutex: &sync.RWMutex{},
+ m: map[string]config.DataSource{},
+ extraHeaders: map[string]http.Header{},
+ }
+}
+
+type dsRegistry struct {
+ *sync.RWMutex
+ m map[string]config.DataSource
+ extraHeaders map[string]http.Header
+}
+
+// Register a datasource
+func (r *dsRegistry) Register(alias string, ds config.DataSource) {
+ r.Lock()
+ defer r.Unlock()
+
+ // if there's an extra header for this datasource, and the datasource
+ // doesn't have a header, add it now
+ if hdr, ok := r.extraHeaders[alias]; ok && ds.Header == nil {
+ ds.Header = hdr
+ }
+
+ r.m[alias] = ds
+}
+
+// Lookup a registered datasource
+func (r *dsRegistry) Lookup(alias string) (config.DataSource, bool) {
+ r.RLock()
+ defer r.RUnlock()
+
+ ds, ok := r.m[alias]
+ if !ok {
+ return ds, ok
+ }
+
+ return ds, ok
+}
+
+// List registered datasource aliases
+func (r *dsRegistry) List() []string {
+ r.RLock()
+ defer r.RUnlock()
+
+ keys := make([]string, 0, len(r.m))
+ for k := range r.m {
+ keys = append(keys, k)
+ }
+
+ sort.Strings(keys)
+
+ return keys
+}
+
+// AddExtraHeader adds extra headers not attached (yet) to a datasource. These will be added
+// to the headers of any datasource with a matching alias when it is registered.
+func (r *dsRegistry) AddExtraHeader(alias string, hdr http.Header) {
+ r.Lock()
+ defer r.Unlock()
+
+ r.extraHeaders[alias] = hdr
+}
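A short sketch of the intended registry flow, based on the Register/AddExtraHeader behaviour above; the alias, URL, and header values are illustrative. Extra headers (e.g. from --datasource-header) that don't yet match a datasource are parked with AddExtraHeader and picked up when a datasource with the same alias is later registered.

    // a minimal sketch, assuming these internal packages were importable
    package examples

    import (
    	"fmt"
    	"net/http"
    	"net/url"

    	"github.com/hairyhenderson/gomplate/v4/internal/config"
    	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
    )

    func registryExample() {
    	reg := datafs.NewRegistry()

    	// headers that don't yet match a registered datasource are held aside...
    	reg.AddExtraHeader("api", http.Header{"Accept": []string{"application/json"}})

    	// ...and attached when a datasource with that alias is registered
    	// (unless the datasource already carries its own header)
    	reg.Register("api", config.DataSource{
    		URL: &url.URL{Scheme: "https", Host: "example.com", Path: "/v1/data"},
    	})

    	ds, _ := reg.Lookup("api")
    	fmt.Println(ds.Header.Get("Accept")) // application/json

    	fmt.Println(reg.List()) // [api] - aliases are returned sorted
    }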
diff --git a/internal/datafs/registry_test.go b/internal/datafs/registry_test.go
new file mode 100644
index 00000000..ad81d2fe
--- /dev/null
+++ b/internal/datafs/registry_test.go
@@ -0,0 +1,48 @@
+package datafs
+
+import (
+ "net/http"
+ "testing"
+
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDefaultRegistry(t *testing.T) {
+ reg := NewRegistry()
+ ds := config.DataSource{}
+ reg.Register("foo", ds)
+
+ actual, ok := reg.Lookup("foo")
+ require.True(t, ok)
+ require.Equal(t, ds, actual)
+
+ _, ok = reg.Lookup("bar")
+ require.False(t, ok)
+}
+
+func TestDefaultRegistry_List(t *testing.T) {
+ reg := NewRegistry()
+ ds := config.DataSource{}
+ reg.Register("a", ds)
+ reg.Register("b", ds)
+ reg.Register("c", ds)
+ reg.Register("d", ds)
+
+ actual := reg.List()
+
+ // list must be sorted
+ require.Equal(t, []string{"a", "b", "c", "d"}, actual)
+}
+
+func TestDefaultRegistry_AddExtraHeader(t *testing.T) {
+ reg := NewRegistry()
+ hdr := http.Header{"foo": {"bar"}}
+ reg.AddExtraHeader("baz", hdr)
+
+ reg.Register("baz", config.DataSource{})
+
+ ds, ok := reg.Lookup("baz")
+ require.True(t, ok)
+ require.Equal(t, hdr, ds.Header)
+}
diff --git a/internal/funcs/data.go b/internal/funcs/data.go
index 1adb90aa..085dbc54 100644
--- a/internal/funcs/data.go
+++ b/internal/funcs/data.go
@@ -4,22 +4,12 @@ import (
"context"
"github.com/hairyhenderson/gomplate/v4/conv"
- "github.com/hairyhenderson/gomplate/v4/data"
"github.com/hairyhenderson/gomplate/v4/internal/parsers"
)
// CreateDataFuncs -
-//
-//nolint:staticcheck
-func CreateDataFuncs(ctx context.Context, d *data.Data) map[string]interface{} {
+func CreateDataFuncs(ctx context.Context) map[string]interface{} {
f := map[string]interface{}{}
- f["datasource"] = d.Datasource
- f["ds"] = d.Datasource
- f["datasourceExists"] = d.DatasourceExists
- f["datasourceReachable"] = d.DatasourceReachable
- f["defineDatasource"] = d.DefineDatasource
- f["include"] = d.Include
- f["listDatasources"] = d.ListDatasources
ns := &DataFuncs{ctx}
diff --git a/internal/funcs/data_test.go b/internal/funcs/data_test.go
index c536beb3..b01530b6 100644
--- a/internal/funcs/data_test.go
+++ b/internal/funcs/data_test.go
@@ -17,7 +17,7 @@ func TestCreateDataFuncs(t *testing.T) {
t.Parallel()
ctx := context.Background()
- fmap := CreateDataFuncs(ctx, nil)
+ fmap := CreateDataFuncs(ctx)
actual := fmap["data"].(func() interface{})
assert.Equal(t, ctx, actual().(*DataFuncs).ctx)
diff --git a/internal/funcs/datasource.go b/internal/funcs/datasource.go
new file mode 100644
index 00000000..6c4badf8
--- /dev/null
+++ b/internal/funcs/datasource.go
@@ -0,0 +1,103 @@
+package funcs
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+ "github.com/hairyhenderson/gomplate/v4/internal/datafs"
+ "github.com/hairyhenderson/gomplate/v4/internal/parsers"
+ "github.com/hairyhenderson/gomplate/v4/internal/urlhelpers"
+)
+
+// CreateDataSourceFuncs -
+func CreateDataSourceFuncs(ctx context.Context, sr datafs.DataSourceReader) map[string]interface{} {
+ ns := &dataSourceFuncs{
+ ctx: ctx,
+ sr: sr,
+ }
+
+ f := map[string]interface{}{}
+
+ // undocumented but available
+ f["_datasource"] = func() interface{} { return ns }
+
+ f["datasource"] = ns.Datasource
+ f["ds"] = ns.Datasource
+ f["datasourceExists"] = ns.DatasourceExists
+ f["datasourceReachable"] = ns.DatasourceReachable
+ f["defineDatasource"] = ns.DefineDatasource
+ f["include"] = ns.Include
+ f["listDatasources"] = ns.ListDatasources
+
+ return f
+}
+
+// dataSourceFuncs - datasource reading functions
+type dataSourceFuncs struct {
+ ctx context.Context
+ sr datafs.DataSourceReader
+}
+
+// Include - Reads from the named datasource, without parsing the data, which
+// is returned as a string.
+func (d *dataSourceFuncs) Include(alias string, args ...string) (string, error) {
+ _, b, err := d.sr.ReadSource(d.ctx, alias, args...)
+ if err != nil {
+ return "", err
+ }
+
+ return string(b), err
+}
+
+// Datasource - Reads from the named datasource, and returns the parsed data.
+func (d *dataSourceFuncs) Datasource(alias string, args ...string) (interface{}, error) {
+ ct, b, err := d.sr.ReadSource(d.ctx, alias, args...)
+ if err != nil {
+ return nil, err
+ }
+
+ return parsers.ParseData(ct, string(b))
+}
+
+// DefineDatasource -
+func (d *dataSourceFuncs) DefineDatasource(alias, value string) (string, error) {
+ if alias == "" {
+ return "", fmt.Errorf("datasource alias must be provided")
+ }
+ if d.DatasourceExists(alias) {
+ slog.DebugContext(d.ctx, "defineDatasource: ignoring attempt to redefine datasource", "alias", alias)
+ return "", nil
+ }
+ srcURL, err := urlhelpers.ParseSourceURL(value)
+ if err != nil {
+ return "", fmt.Errorf("parse datasource URL: %w", err)
+ }
+
+ d.sr.Register(alias, config.DataSource{URL: srcURL})
+ return "", nil
+}
+
+// DatasourceExists -
+func (d *dataSourceFuncs) DatasourceExists(alias string) bool {
+ _, ok := d.sr.Lookup(alias)
+ return ok
+}
+
+// DatasourceReachable - Determines if the named datasource is reachable with
+// the given arguments. Reads from the datasource, and discards the returned data.
+func (d *dataSourceFuncs) DatasourceReachable(alias string, args ...string) bool {
+ // first, if the datasource doesn't exist, we can't reach it
+ if !d.DatasourceExists(alias) {
+ return false
+ }
+
+ _, _, err := d.sr.ReadSource(d.ctx, alias, args...)
+ return err == nil
+}
+
+// ListDatasources - lists the aliases of all registered datasources
+func (d *dataSourceFuncs) ListDatasources() []string {
+ return d.sr.List()
+}
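As a rough illustration of how the new namespace could plug into a text/template FuncMap - the real wiring goes through CreateFuncs and the renderer, which this excerpt doesn't show, and the datasource URL and template text are made up for the example. As before, the context needs an FSProvider for the file scheme.

    // a minimal sketch, assuming these internal packages were importable
    package examples

    import (
    	"context"
    	"net/url"
    	"os"
    	"text/template"

    	"github.com/hairyhenderson/gomplate/v4/internal/config"
    	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
    	"github.com/hairyhenderson/gomplate/v4/internal/funcs"
    )

    func renderWithDatasource(ctx context.Context) error {
    	reg := datafs.NewRegistry()
    	reg.Register("person", config.DataSource{
    		URL: &url.URL{Scheme: "file", Path: "/tmp/person.json"},
    	})
    	sr := datafs.NewSourceReader(reg)

    	// the returned map provides datasource, ds, include, etc.
    	fm := template.FuncMap(funcs.CreateDataSourceFuncs(ctx, sr))

    	tmpl, err := template.New("t").Funcs(fm).Parse(`{{ (ds "person").name }}`)
    	if err != nil {
    		return err
    	}
    	return tmpl.Execute(os.Stdout, nil)
    }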
diff --git a/internal/funcs/datasource_test.go b/internal/funcs/datasource_test.go
new file mode 100644
index 00000000..3c5f5f7f
--- /dev/null
+++ b/internal/funcs/datasource_test.go
@@ -0,0 +1,217 @@
+package funcs
+
+import (
+ "context"
+ "net/url"
+ "runtime"
+ "strconv"
+ "testing"
+ "testing/fstest"
+
+ "github.com/hairyhenderson/gomplate/v4/internal/config"
+ "github.com/hairyhenderson/gomplate/v4/internal/datafs"
+ "github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateDataSourceFuncs(t *testing.T) {
+ t.Parallel()
+
+ for i := 0; i < 10; i++ {
+ // Run this a bunch to catch race conditions
+ t.Run(strconv.Itoa(i), func(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+ fmap := CreateDataSourceFuncs(ctx, nil)
+ actual := fmap["_datasource"].(func() interface{})
+
+ assert.Equal(t, ctx, actual().(*dataSourceFuncs).ctx)
+ })
+ }
+}
+
+const osWindows = "windows"
+
+func TestDatasource(t *testing.T) {
+ setup := func(ext string, contents []byte) *dataSourceFuncs {
+ fname := "foo." + ext
+ var uPath string
+ if runtime.GOOS == osWindows {
+ uPath = "C:/tmp/" + fname
+ } else {
+ uPath = "/tmp/" + fname
+ }
+
+ fsys := datafs.WrapWdFS(fstest.MapFS{
+ "tmp/" + fname: &fstest.MapFile{Data: contents},
+ })
+ ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
+
+ reg := datafs.NewRegistry()
+ reg.Register("foo", config.DataSource{URL: &url.URL{Scheme: "file", Path: uPath}})
+
+ d := &dataSourceFuncs{sr: datafs.NewSourceReader(reg), ctx: ctx}
+ return d
+ }
+
+ test := func(ext, mime string, contents []byte, expected interface{}) {
+ data := setup(ext, contents)
+
+ actual, err := data.Datasource("foo", "?type="+mime)
+ require.NoError(t, err)
+ assert.Equal(t, expected, actual)
+ }
+
+ testObj := func(ext, mime string, contents []byte) {
+ test(ext, mime, contents,
+ map[string]interface{}{
+ "hello": map[string]interface{}{"cruel": "world"},
+ })
+ }
+
+ testObj("json", iohelpers.JSONMimetype, []byte(`{"hello":{"cruel":"world"}}`))
+ testObj("yml", iohelpers.YAMLMimetype, []byte("hello:\n cruel: world\n"))
+ test("json", iohelpers.JSONMimetype, []byte(`[1, "two", true]`),
+ []interface{}{1, "two", true})
+ test("yaml", iohelpers.YAMLMimetype, []byte("---\n- 1\n- two\n- true\n"),
+ []interface{}{1, "two", true})
+
+ d := setup("", nil)
+ actual, err := d.Datasource("foo")
+ require.NoError(t, err)
+ assert.Equal(t, "", actual)
+
+ _, err = d.Datasource("bar")
+ require.Error(t, err)
+}
+
+func TestDatasourceReachable(t *testing.T) {
+ fname := "foo.json"
+ var uPath string
+ if runtime.GOOS == osWindows {
+ uPath = "C:/tmp/" + fname
+ } else {
+ uPath = "/tmp/" + fname
+ }
+
+ fsys := datafs.WrapWdFS(fstest.MapFS{
+ "tmp/" + fname: &fstest.MapFile{Data: []byte("{}")},
+ })
+ ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
+
+ reg := datafs.NewRegistry()
+ reg.Register("foo", config.DataSource{URL: &url.URL{Scheme: "file", Path: uPath}})
+ reg.Register("bar", config.DataSource{URL: &url.URL{Scheme: "file", Path: "/bogus"}})
+
+ data := &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ data.ctx = ctx
+
+ assert.True(t, data.DatasourceReachable("foo"))
+ assert.False(t, data.DatasourceReachable("bar"))
+}
+
+func TestDatasourceExists(t *testing.T) {
+ reg := datafs.NewRegistry()
+ reg.Register("foo", config.DataSource{})
+ data := &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+
+ assert.True(t, data.DatasourceExists("foo"))
+ assert.False(t, data.DatasourceExists("bar"))
+}
+
+func TestInclude(t *testing.T) {
+ ext := "txt"
+ contents := "hello world"
+ fname := "foo." + ext
+
+ var uPath string
+ if runtime.GOOS == osWindows {
+ uPath = "C:/tmp/" + fname
+ } else {
+ uPath = "/tmp/" + fname
+ }
+
+ fsys := datafs.WrapWdFS(fstest.MapFS{
+ "tmp/" + fname: &fstest.MapFile{Data: []byte(contents)},
+ })
+ ctx := datafs.ContextWithFSProvider(context.Background(), datafs.WrappedFSProvider(fsys, "file", ""))
+
+ reg := datafs.NewRegistry()
+ reg.Register("foo", config.DataSource{URL: &url.URL{Scheme: "file", Path: uPath}})
+
+ data := &dataSourceFuncs{sr: datafs.NewSourceReader(reg), ctx: ctx}
+
+ actual, err := data.Include("foo")
+ require.NoError(t, err)
+ assert.Equal(t, contents, actual)
+}
+
+func TestDefineDatasource(t *testing.T) {
+ reg := datafs.NewRegistry()
+ d := &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err := d.DefineDatasource("", "foo.json")
+ require.Error(t, err)
+
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("", "../foo.json")
+ require.Error(t, err)
+
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("", "ftp://example.com/foo.yml")
+ require.Error(t, err)
+
+ reg = datafs.NewRegistry()
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("data", "foo.json")
+ s, _ := reg.Lookup("data")
+ require.NoError(t, err)
+ assert.EqualValues(t, &url.URL{Path: "foo.json"}, s.URL)
+
+ reg = datafs.NewRegistry()
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("data", "/otherdir/foo.json")
+ s, _ = reg.Lookup("data")
+ require.NoError(t, err)
+ assert.Equal(t, "file", s.URL.Scheme)
+ assert.True(t, s.URL.IsAbs())
+ assert.Equal(t, "/otherdir/foo.json", s.URL.Path)
+
+ reg = datafs.NewRegistry()
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("data", "sftp://example.com/blahblah/foo.json")
+ s, _ = reg.Lookup("data")
+ require.NoError(t, err)
+ assert.Equal(t, "sftp", s.URL.Scheme)
+ assert.True(t, s.URL.IsAbs())
+ assert.Equal(t, "/blahblah/foo.json", s.URL.Path)
+
+ reg = datafs.NewRegistry()
+ reg.Register("data", config.DataSource{})
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("data", "/otherdir/foo.json")
+ s, _ = reg.Lookup("data")
+ require.NoError(t, err)
+ assert.Nil(t, s.URL)
+
+ reg = datafs.NewRegistry()
+ d = &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+ _, err = d.DefineDatasource("data", "/otherdir/foo?type=application/x-env")
+ require.NoError(t, err)
+
+ s, _ = reg.Lookup("data")
+ require.NotNil(t, s)
+ assert.Equal(t, "/otherdir/foo", s.URL.Path)
+}
+
+func TestListDatasources(t *testing.T) {
+ reg := datafs.NewRegistry()
+ reg.Register("foo", config.DataSource{})
+ reg.Register("bar", config.DataSource{})
+
+ d := &dataSourceFuncs{sr: datafs.NewSourceReader(reg)}
+
+ assert.Equal(t, []string{"bar", "foo"}, d.ListDatasources())
+}
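The setup boilerplate in TestDatasource, TestDatasourceReachable, and TestInclude above repeats the same three-step wiring: build an in-memory filesystem, register an alias in a datafs registry, and hand a source reader to dataSourceFuncs. A possible same-package helper capturing that pattern is sketched below; it is illustrative only and not part of this change — the helper name and the extra strings import are assumptions, and the Windows drive-letter branch used in the tests is omitted for brevity.

// hypothetical same-package test helper; mirrors the setup used in the tests above
func newTestDataSourceFuncs(t *testing.T, path string, contents []byte) *dataSourceFuncs {
	t.Helper()

	// in-memory filesystem, keyed without the leading slash (as in the tests above);
	// requires an additional "strings" import
	fsys := datafs.WrapWdFS(fstest.MapFS{
		strings.TrimPrefix(path, "/"): &fstest.MapFile{Data: contents},
	})
	ctx := datafs.ContextWithFSProvider(context.Background(),
		datafs.WrappedFSProvider(fsys, "file", ""))

	// one registry per test; the source reader resolves aliases through it
	reg := datafs.NewRegistry()
	reg.Register("foo", config.DataSource{URL: &url.URL{Scheme: "file", Path: path}})

	return &dataSourceFuncs{sr: datafs.NewSourceReader(reg), ctx: ctx}
}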
diff --git a/data/mimetypes_test.go b/internal/iohelpers/mimetypes_test.go
index 04c54439..a1ec8323 100644
--- a/data/mimetypes_test.go
+++ b/internal/iohelpers/mimetypes_test.go
@@ -1,4 +1,4 @@
-package data
+package iohelpers
import (
"testing"
@@ -11,12 +11,12 @@ func TestMimeAlias(t *testing.T) {
data := []struct {
in, out string
}{
- {csvMimetype, csvMimetype},
- {yamlMimetype, yamlMimetype},
- {"application/x-yaml", yamlMimetype},
+ {CSVMimetype, CSVMimetype},
+ {YAMLMimetype, YAMLMimetype},
+ {"application/x-yaml", YAMLMimetype},
}
for _, d := range data {
- assert.Equal(t, d.out, mimeAlias(d.in))
+ assert.Equal(t, d.out, MimeAlias(d.in))
}
}
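With the constants and MimeAlias now exported from internal/iohelpers, other packages in the module can normalize media types directly. A minimal sketch of a caller (illustrative only, not part of the diff; iohelpers remains internal to the gomplate module):

package example

import "github.com/hairyhenderson/gomplate/v4/internal/iohelpers"

// normalizeType maps alternate MIME names onto their canonical form,
// e.g. "application/x-yaml" -> iohelpers.YAMLMimetype (per the test table above);
// already-canonical values such as iohelpers.CSVMimetype pass through unchanged.
func normalizeType(ct string) string {
	return iohelpers.MimeAlias(ct)
}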
diff --git a/internal/tests/integration/datasources_vault_ec2_test.go b/internal/tests/integration/datasources_vault_ec2_test.go
index 9e4a5192..c00a8d43 100644
--- a/internal/tests/integration/datasources_vault_ec2_test.go
+++ b/internal/tests/integration/datasources_vault_ec2_test.go
@@ -10,6 +10,7 @@ import (
"net/http/httptest"
"testing"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gotest.tools/v3/fs"
)
@@ -28,7 +29,10 @@ func setupDatasourcesVaultEc2Test(t *testing.T) (*fs.Dir, *vaultClient, *httptes
if r.Body != nil {
var err error
b, err = io.ReadAll(r.Body)
- require.NoError(t, err)
+ if !assert.NoError(t, err) {
+ w.WriteHeader(http.StatusInternalServerError)
+ return
+ }
defer r.Body.Close()
}
t.Logf("IMDS Token request: %s %s: %s", r.Method, r.URL, b)
diff --git a/internal/tests/integration/datasources_vault_test.go b/internal/tests/integration/datasources_vault_test.go
index 448fa759..43b6f502 100644
--- a/internal/tests/integration/datasources_vault_test.go
+++ b/internal/tests/integration/datasources_vault_test.go
@@ -130,7 +130,7 @@ func TestDatasources_Vault_TokenAuth(t *testing.T) {
withEnv("VAULT_ADDR", "http://"+v.addr).
withEnv("VAULT_TOKEN", tok).
run()
- assert.ErrorContains(t, err, "error calling ds: couldn't read datasource 'vault':")
+ assert.ErrorContains(t, err, "error calling ds: couldn't read datasource 'vault'")
assert.ErrorContains(t, err, "stat secret/bar")
assert.ErrorContains(t, err, "file does not exist")
diff --git a/render.go b/render.go
index 1978f4fe..fe0d7f74 100644
--- a/render.go
+++ b/render.go
@@ -12,9 +12,9 @@ import (
"github.com/hairyhenderson/go-fsimpl"
"github.com/hairyhenderson/go-fsimpl/autofs"
- "github.com/hairyhenderson/gomplate/v4/data"
"github.com/hairyhenderson/gomplate/v4/internal/config"
"github.com/hairyhenderson/gomplate/v4/internal/datafs"
+ "github.com/hairyhenderson/gomplate/v4/internal/funcs"
)
// Options for template rendering.
@@ -108,8 +108,7 @@ type Datasource struct {
//
// Experimental: subject to breaking changes before the next major release
type Renderer struct {
- //nolint:staticcheck
- data *data.Data
+ sr datafs.DataSourceReader
fsp fsimpl.FSProvider
nested config.Templates
funcs template.FuncMap
@@ -129,21 +128,23 @@ func NewRenderer(opts Options) *Renderer {
Metrics = newMetrics()
}
+ // this should be the only place where the datasource registry is created
+ reg := datafs.NewRegistry()
+
tctxAliases := []string{}
- sources := map[string]config.DataSource{}
for alias, ds := range opts.Context {
tctxAliases = append(tctxAliases, alias)
- sources[alias] = config.DataSource{
+ reg.Register(alias, config.DataSource{
URL: ds.URL,
Header: ds.Header,
- }
+ })
}
for alias, ds := range opts.Datasources {
- sources[alias] = config.DataSource{
+ reg.Register(alias, config.DataSource{
URL: ds.URL,
Header: ds.Header,
- }
+ })
}
// convert the internal config.Templates to a map[string]Datasource
@@ -156,10 +157,8 @@ func NewRenderer(opts Options) *Renderer {
}
}
- //nolint:staticcheck
- d := &data.Data{
- ExtraHeaders: opts.ExtraHeaders,
- Sources: sources,
+ for k := range opts.ExtraHeaders {
+ reg.AddExtraHeader(k, opts.ExtraHeaders[k])
}
if opts.Funcs == nil {
@@ -175,9 +174,12 @@ func NewRenderer(opts Options) *Renderer {
opts.FSProvider = DefaultFSProvider
}
+ // TODO: move this in?
+ sr := datafs.NewSourceReader(reg)
+
return &Renderer{
nested: nested,
- data: d,
+ sr: sr,
funcs: opts.Funcs,
tctxAliases: tctxAliases,
lDelim: opts.LDelim,
@@ -213,7 +215,7 @@ func (t *Renderer) RenderTemplates(ctx context.Context, templates []Template) er
// configure the template context with the refreshed Data value
// only done here because the data context may have changed
- tmplctx, err := createTmplContext(ctx, t.tctxAliases, t.data)
+ tmplctx, err := createTmplContext(ctx, t.tctxAliases, t.sr)
if err != nil {
return err
}
@@ -224,7 +226,10 @@ func (t *Renderer) RenderTemplates(ctx context.Context, templates []Template) er
func (t *Renderer) renderTemplatesWithData(ctx context.Context, templates []Template, tmplctx interface{}) error {
// update funcs with the current context
// only done here to ensure the context is properly set in func namespaces
- f := CreateFuncs(ctx, t.data)
+ f := CreateFuncs(ctx)
+
+ // add datasource funcs here because they need to share the source reader
+ addToMap(f, funcs.CreateDataSourceFuncs(ctx, t.sr))
// add user-defined funcs last so they override the built-in funcs
addToMap(f, t.funcs)
@@ -252,7 +257,7 @@ func (t *Renderer) renderTemplate(ctx context.Context, template Template, f temp
tmpl, err := parseTemplate(ctx, template.Name, template.Text,
f, tmplctx, t.nested, t.lDelim, t.rDelim, t.missingKey)
if err != nil {
- return err
+ return fmt.Errorf("parse template %s: %w", template.Name, err)
}
err = tmpl.Execute(template.Writer, tmplctx)
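Taken together, the render.go changes keep the registry and source reader out of the public surface: datasources passed via Options are registered once in NewRenderer, the shared reader feeds both createTmplContext and the datasource template funcs, and parse failures now carry the template name. A rough caller-side sketch using only the fields visible in the hunks above (the `datasource` template function name and the env: URL are assumptions, the Options and Template field sets shown here may be incomplete, and error handling is elided):

package main

import (
	"bytes"
	"context"
	"net/url"

	"github.com/hairyhenderson/gomplate/v4"
)

func main() {
	u, _ := url.Parse("env:///FOO?type=application/json")

	r := gomplate.NewRenderer(gomplate.Options{
		Datasources: map[string]gomplate.Datasource{
			"foo": {URL: u},
		},
	})

	buf := &bytes.Buffer{}
	// a parse error here would now surface as "parse template t1: ..."
	_ = r.RenderTemplates(context.Background(), []gomplate.Template{
		{Name: "t1", Text: `{{ (datasource "foo").bar }}`, Writer: buf},
	})
}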