From 47b74a5505d4c9979d24a8bcffde711a60c5f23a Mon Sep 17 00:00:00 2001
From: Dave Henderson
Date: Sun, 9 Jun 2024 19:25:17 -0400
Subject: chore(api)!: Overhauling config and rendering types (#2094)

Signed-off-by: Dave Henderson
---
 config.go                          | 657 +++++++++++++++++++++++++++
 config_test.go                     | 891 +++++++++++++++++++++++++++++++++++++
 context_test.go                    |   5 +-
 gomplate.go                        |  49 +-
 gomplate_test.go                   |  24 +-
 internal/cmd/config.go             |  46 +-
 internal/cmd/config_test.go        |  97 ++--
 internal/cmd/main.go               |  12 +-
 internal/config/configfile.go      | 602 -------------------------
 internal/config/configfile_test.go | 873 ------------------------------------
 internal/config/types.go           |  60 ++-
 internal/config/types_test.go      |  91 ----
 internal/datafs/context.go         |   5 +
 plugins.go                         |   3 +-
 plugins_test.go                    |   7 +-
 render.go                          | 134 ++----
 render_test.go                     |  22 +-
 template.go                        |  28 +-
 template_test.go                   |  17 +-
 template_unix_test.go              |   3 +-
 template_windows_test.go           |   3 +-
 21 files changed, 1837 insertions(+), 1792 deletions(-)
 create mode 100644 config.go
 create mode 100644 config_test.go
 delete mode 100644 internal/config/configfile.go
 delete mode 100644 internal/config/configfile_test.go
 delete mode 100644 internal/config/types_test.go

diff --git a/config.go b/config.go
new file mode 100644
index 00000000..72d65f5b
--- /dev/null
+++ b/config.go
@@ -0,0 +1,657 @@
+package gomplate
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"os"
+	"path"
+	"slices"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/hairyhenderson/gomplate/v4/internal/config"
+	"github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
+	"github.com/hairyhenderson/gomplate/v4/internal/urlhelpers"
+	"github.com/hairyhenderson/yaml"
+)
+
+// Parse a config file
+func Parse(in io.Reader) (*Config, error) {
+	out := &Config{}
+	dec := yaml.NewDecoder(in)
+	err := dec.Decode(out)
+	if err != nil && err != io.EOF {
+		return out, fmt.Errorf("YAML decoding failed, syntax may be invalid: %w", err)
+	}
+	return out, nil
+}
+
+// Config models gomplate's configuration file and command-line options. It
+// also contains some fields that can't be set in the config file.
+type Config struct {
+	// Stdin - override for stdin:// URLs or the '-' input file. Can't be set in
+	// the config file.
+	// Usually this should be left as default - this will be set at runtime.
+	Stdin io.Reader `yaml:"-"`
+
+	// Stdout - override for the '-' output file. Can't be set in the config
+	// file.
+	// Usually this should be left as default - this will be set at runtime.
+	Stdout io.Writer `yaml:"-"`
+
+	// Stderr - override for plugins to write to stderr. Can't be set in the
+	// config file.
+	// Usually this should be left as default - this will be set at runtime.
+	Stderr io.Writer `yaml:"-"`
+
+	// ExtraHeaders - Extra HTTP headers not attached to pre-defined datasources.
+	// Potentially used by datasources defined in the template at runtime. Can't
+	// currently be set in the config file.
+	ExtraHeaders map[string]http.Header `yaml:"-"`
+
+	DataSources map[string]DataSource   `yaml:"datasources,omitempty"`
+	Context     map[string]DataSource   `yaml:"context,omitempty"`
+	Templates   map[string]DataSource   `yaml:"templates,omitempty"`
+	Plugins     map[string]PluginConfig `yaml:"plugins,omitempty"`
+
+	Input                 string   `yaml:"in,omitempty"`
+	InputDir              string   `yaml:"inputDir,omitempty"`
+	InputFiles            []string `yaml:"inputFiles,omitempty,flow"`
+	ExcludeGlob           []string `yaml:"excludes,omitempty"`
+	ExcludeProcessingGlob []string `yaml:"excludeProcessing,omitempty"`
+
+	OutputDir   string   `yaml:"outputDir,omitempty"`
+	OutputMap   string   `yaml:"outputMap,omitempty"`
+	OutputFiles []string `yaml:"outputFiles,omitempty,flow"`
+	OutMode     string   `yaml:"chmod,omitempty"`
+
+	LDelim string `yaml:"leftDelim,omitempty"`
+	RDelim string `yaml:"rightDelim,omitempty"`
+
+	MissingKey string `yaml:"missingKey,omitempty"`
+
+	PostExec []string `yaml:"postExec,omitempty,flow"`
+
+	PluginTimeout time.Duration `yaml:"pluginTimeout,omitempty"`
+
+	ExecPipe     bool `yaml:"execPipe,omitempty"`
+	Experimental bool `yaml:"experimental,omitempty"`
+}
+
+// TODO: remove when we remove the deprecated array format for templates
+type rawConfig struct {
+	DataSources map[string]DataSource   `yaml:"datasources,omitempty"`
+	Context     map[string]DataSource   `yaml:"context,omitempty"`
+	Templates   config.Templates        `yaml:"templates,omitempty"`
+	Plugins     map[string]PluginConfig `yaml:"plugins,omitempty"`
+
+	Input                 string   `yaml:"in,omitempty"`
+	InputDir              string   `yaml:"inputDir,omitempty"`
+	InputFiles            []string `yaml:"inputFiles,omitempty,flow"`
+	ExcludeGlob           []string `yaml:"excludes,omitempty"`
+	ExcludeProcessingGlob []string `yaml:"excludeProcessing,omitempty"`
+
+	OutputDir   string   `yaml:"outputDir,omitempty"`
+	OutputMap   string   `yaml:"outputMap,omitempty"`
+	OutputFiles []string `yaml:"outputFiles,omitempty,flow"`
+	OutMode     string   `yaml:"chmod,omitempty"`
+
+	LDelim string `yaml:"leftDelim,omitempty"`
+	RDelim string `yaml:"rightDelim,omitempty"`
+
+	MissingKey string `yaml:"missingKey,omitempty"`
+
+	PostExec []string `yaml:"postExec,omitempty,flow"`
+
+	PluginTimeout time.Duration `yaml:"pluginTimeout,omitempty"`
+
+	ExecPipe     bool `yaml:"execPipe,omitempty"`
+	Experimental bool `yaml:"experimental,omitempty"`
+}
+
+// TODO: remove when we remove the deprecated array format for templates
+//
+// Deprecated: custom unmarshaling will be removed in the next version
+func (c *Config) UnmarshalYAML(value *yaml.Node) error {
+	r := rawConfig{}
+	err := value.Decode(&r)
+	if err != nil {
+		return err
+	}
+
+	*c = Config{
+		DataSources:           r.DataSources,
+		Context:               r.Context,
+		Templates:             r.Templates,
+		Plugins:               r.Plugins,
+		Input:                 r.Input,
+		InputDir:              r.InputDir,
+		InputFiles:            r.InputFiles,
+		ExcludeGlob:           r.ExcludeGlob,
+		ExcludeProcessingGlob: r.ExcludeProcessingGlob,
+		OutputDir:             r.OutputDir,
+		OutputMap:             r.OutputMap,
+		OutputFiles:           r.OutputFiles,
+		OutMode:               r.OutMode,
+		LDelim:                r.LDelim,
+		RDelim:                r.RDelim,
+		MissingKey:            r.MissingKey,
+		PostExec:              r.PostExec,
+		PluginTimeout:         r.PluginTimeout,
+		ExecPipe:              r.ExecPipe,
+		Experimental:          r.Experimental,
+	}
+
+	return nil
+}
+
+// TODO: remove when we remove the deprecated array format for templates
+//
+// Deprecated: custom unmarshaling will be removed in the next version
+func (c Config) MarshalYAML() (interface{}, error) {
+	aux := rawConfig{
+		DataSources:           c.DataSources,
+		Context:               c.Context,
+		Templates:             c.Templates,
+		Plugins:               c.Plugins,
+		Input:                 c.Input,
+		InputDir:              c.InputDir,
+		InputFiles:            c.InputFiles,
+		ExcludeGlob:           c.ExcludeGlob,
+		ExcludeProcessingGlob: c.ExcludeProcessingGlob,
+		OutputDir:             c.OutputDir,
+		OutputMap:             c.OutputMap,
+		OutputFiles:           c.OutputFiles,
+		OutMode:               c.OutMode,
+		LDelim:                c.LDelim,
+		RDelim:                c.RDelim,
+		MissingKey:            c.MissingKey,
+		PostExec:              c.PostExec,
+		PluginTimeout:         c.PluginTimeout,
+		ExecPipe:              c.ExecPipe,
+		Experimental:          c.Experimental,
+	}
+
+	return aux, nil
+}
+
+// mergeDataSourceMaps - use d as defaults, and override with values from o
+func mergeDataSourceMaps(d, o map[string]DataSource) map[string]DataSource {
+	for k, v := range o {
+		c, ok := d[k]
+		if ok {
+			d[k] = mergeDataSources(c, v)
+		} else {
+			d[k] = v
+		}
+	}
+	return d
+}
+
+// mergeDataSources - use left as default, and override with values from right
+func mergeDataSources(left, right DataSource) DataSource {
+	if right.URL != nil {
+		left.URL = right.URL
+	}
+	if left.Header == nil {
+		left.Header = right.Header
+	} else {
+		for k, v := range right.Header {
+			left.Header[k] = v
+		}
+	}
+	return left
+}
+
+// DataSource - datasource configuration
+type DataSource = config.DataSource
+
+type PluginConfig struct {
+	Cmd     string
+	Args    []string      `yaml:"args,omitempty"`
+	Timeout time.Duration `yaml:"timeout,omitempty"`
+	Pipe    bool          `yaml:"pipe,omitempty"`
+}
+
+// UnmarshalYAML - satisfy the yaml.Unmarshaler interface - plugin configs can
+// either be a plain string (to specify only the name), or a map with a name,
+// timeout, and pipe flag.
+func (p *PluginConfig) UnmarshalYAML(value *yaml.Node) error {
+	if value.Kind == yaml.ScalarNode {
+		s := ""
+		err := value.Decode(&s)
+		if err != nil {
+			return err
+		}
+
+		*p = PluginConfig{Cmd: s}
+		return nil
+	}
+
+	if value.Kind != yaml.MappingNode {
+		return fmt.Errorf("plugin config must be a string or map")
+	}
+
+	type raw struct {
+		Cmd     string
+		Args    []string
+		Timeout time.Duration
+		Pipe    bool
+	}
+	r := raw{}
+	err := value.Decode(&r)
+	if err != nil {
+		return err
+	}
+
+	*p = PluginConfig(r)
+
+	return nil
+}
+
+// MergeFrom - use this Config as the defaults, and override it with any
+// non-zero values from the other Config
+//
+// Note that Input/InputDir/InputFiles will override each other, as well as
+// OutputDir/OutputFiles.
+func (c *Config) MergeFrom(o *Config) *Config { + switch { + case !isZero(o.Input): + c.Input = o.Input + c.InputDir = "" + c.InputFiles = nil + c.OutputDir = "" + case !isZero(o.InputDir): + c.Input = "" + c.InputDir = o.InputDir + c.InputFiles = nil + case !isZero(o.InputFiles): + if !(len(o.InputFiles) == 1 && o.InputFiles[0] == "-") { + c.Input = "" + c.InputFiles = o.InputFiles + c.InputDir = "" + c.OutputDir = "" + } + } + + if !isZero(o.OutputMap) { + c.OutputDir = "" + c.OutputFiles = nil + c.OutputMap = o.OutputMap + } + if !isZero(o.OutputDir) { + c.OutputDir = o.OutputDir + c.OutputFiles = nil + c.OutputMap = "" + } + if !isZero(o.OutputFiles) { + c.OutputDir = "" + c.OutputFiles = o.OutputFiles + c.OutputMap = "" + } + if !isZero(o.ExecPipe) { + c.ExecPipe = o.ExecPipe + c.PostExec = o.PostExec + c.OutputFiles = o.OutputFiles + } + if !isZero(o.ExcludeGlob) { + c.ExcludeGlob = o.ExcludeGlob + } + if !isZero(o.ExcludeProcessingGlob) { + c.ExcludeProcessingGlob = o.ExcludeProcessingGlob + } + if !isZero(o.OutMode) { + c.OutMode = o.OutMode + } + if !isZero(o.LDelim) { + c.LDelim = o.LDelim + } + if !isZero(o.RDelim) { + c.RDelim = o.RDelim + } + if c.Templates == nil { + c.Templates = o.Templates + } else { + c.Templates = mergeDataSourceMaps(c.Templates, o.Templates) + } + if c.DataSources == nil { + c.DataSources = o.DataSources + } else { + c.DataSources = mergeDataSourceMaps(c.DataSources, o.DataSources) + } + if c.Context == nil { + c.Context = o.Context + } else { + c.Context = mergeDataSourceMaps(c.Context, o.Context) + } + if len(o.Plugins) > 0 { + for k, v := range o.Plugins { + c.Plugins[k] = v + } + } + + return c +} + +// ParseDataSourceFlags - sets DataSources, Context, and Templates fields from +// the key=value format flags as provided at the command-line +// Unreferenced headers will be set in c.ExtraHeaders +func (c *Config) ParseDataSourceFlags(datasources, contexts, templates, headers []string) error { + err := c.parseResources(datasources, contexts, templates) + if err != nil { + return err + } + + hdrs, err := parseHeaderArgs(headers) + if err != nil { + return err + } + + for k, v := range hdrs { + if d, ok := c.Context[k]; ok { + d.Header = v + c.Context[k] = d + delete(hdrs, k) + } + if d, ok := c.DataSources[k]; ok { + d.Header = v + c.DataSources[k] = d + delete(hdrs, k) + } + if t, ok := c.Templates[k]; ok { + t.Header = v + c.Templates[k] = t + delete(hdrs, k) + } + } + if len(hdrs) > 0 { + c.ExtraHeaders = hdrs + } + return nil +} + +func (c *Config) parseResources(datasources, contexts, templates []string) error { + for _, d := range datasources { + k, ds, err := parseDatasourceArg(d) + if err != nil { + return err + } + if c.DataSources == nil { + c.DataSources = map[string]DataSource{} + } + c.DataSources[k] = ds + } + for _, d := range contexts { + k, ds, err := parseDatasourceArg(d) + if err != nil { + return err + } + if c.Context == nil { + c.Context = map[string]DataSource{} + } + c.Context[k] = ds + } + for _, t := range templates { + k, ds, err := parseTemplateArg(t) + if err != nil { + return err + } + if c.Templates == nil { + c.Templates = map[string]DataSource{} + } + c.Templates[k] = ds + } + + return nil +} + +// ParsePluginFlags - sets the Plugins field from the +// key=value format flags as provided at the command-line +func (c *Config) ParsePluginFlags(plugins []string) error { + for _, plugin := range plugins { + parts := strings.SplitN(plugin, "=", 2) + if len(parts) < 2 { + return fmt.Errorf("plugin requires both name and path") 
+ } + if c.Plugins == nil { + c.Plugins = map[string]PluginConfig{} + } + c.Plugins[parts[0]] = PluginConfig{Cmd: parts[1]} + } + return nil +} + +func parseDatasourceArg(value string) (alias string, ds DataSource, err error) { + alias, u, _ := strings.Cut(value, "=") + if u == "" { + u = alias + alias, _, _ = strings.Cut(value, ".") + if path.Base(u) != u { + err = fmt.Errorf("invalid argument (%s): must provide an alias with files not in working directory", value) + return alias, ds, err + } + } + + ds.URL, err = urlhelpers.ParseSourceURL(u) + + return alias, ds, err +} + +func parseHeaderArgs(headerArgs []string) (map[string]http.Header, error) { + headers := make(map[string]http.Header) + for _, v := range headerArgs { + ds, name, value, err := splitHeaderArg(v) + if err != nil { + return nil, err + } + if _, ok := headers[ds]; !ok { + headers[ds] = make(http.Header) + } + headers[ds][name] = append(headers[ds][name], strings.TrimSpace(value)) + } + return headers, nil +} + +func splitHeaderArg(arg string) (datasourceAlias, name, value string, err error) { + parts := strings.SplitN(arg, "=", 2) + if len(parts) != 2 { + err = fmt.Errorf("invalid datasource-header option '%s'", arg) + return "", "", "", err + } + datasourceAlias = parts[0] + name, value, err = splitHeader(parts[1]) + return datasourceAlias, name, value, err +} + +func splitHeader(header string) (name, value string, err error) { + parts := strings.SplitN(header, ":", 2) + if len(parts) != 2 { + err = fmt.Errorf("invalid HTTP Header format '%s'", header) + return "", "", err + } + name = http.CanonicalHeaderKey(parts[0]) + value = parts[1] + return name, value, nil +} + +// Validate the Config +func (c Config) Validate() (err error) { + err = notTogether( + []string{"in", "inputFiles", "inputDir"}, + c.Input, c.InputFiles, c.InputDir) + if err == nil { + err = notTogether( + []string{"outputFiles", "outputDir", "outputMap"}, + c.OutputFiles, c.OutputDir, c.OutputMap) + } + if err == nil { + err = notTogether( + []string{"outputDir", "outputMap", "execPipe"}, + c.OutputDir, c.OutputMap, c.ExecPipe) + } + + if err == nil { + err = mustTogether("outputDir", "inputDir", + c.OutputDir, c.InputDir) + } + + if err == nil { + err = mustTogether("outputMap", "inputDir", + c.OutputMap, c.InputDir) + } + + if err == nil { + f := len(c.InputFiles) + if f == 0 && c.Input != "" { + f = 1 + } + o := len(c.OutputFiles) + if f != o && !c.ExecPipe { + err = fmt.Errorf("must provide same number of 'outputFiles' (%d) as 'in' or 'inputFiles' (%d) options", o, f) + } + } + + if err == nil { + if c.ExecPipe && len(c.PostExec) == 0 { + err = fmt.Errorf("execPipe may only be used with a postExec command") + } + } + + if err == nil { + if c.ExecPipe && (len(c.OutputFiles) > 0 && c.OutputFiles[0] != "-") { + err = fmt.Errorf("must not set 'outputFiles' when using 'execPipe'") + } + } + + if err == nil { + missingKeyValues := []string{"", "error", "zero", "default", "invalid"} + if !slices.Contains(missingKeyValues, c.MissingKey) { + err = fmt.Errorf("not allowed value for the 'missing-key' flag: %s. 
Allowed values: %s", c.MissingKey, strings.Join(missingKeyValues, ",")) + } + } + + return err +} + +func notTogether(names []string, values ...interface{}) error { + found := "" + for i, value := range values { + if isZero(value) { + continue + } + if found != "" { + return fmt.Errorf("only one of these options is supported at a time: '%s', '%s'", + found, names[i]) + } + found = names[i] + } + return nil +} + +func mustTogether(left, right string, lValue, rValue interface{}) error { + if !isZero(lValue) && isZero(rValue) { + return fmt.Errorf("these options must be set together: '%s', '%s'", + left, right) + } + + return nil +} + +func isZero(value interface{}) bool { + switch v := value.(type) { + case string: + return v == "" + case []string: + return len(v) == 0 + case bool: + return !v + default: + return false + } +} + +// ApplyDefaults - any defaults changed here should be added to cmd.InitFlags as +// well for proper help/usage display. +func (c *Config) ApplyDefaults() { + if c.Stdout == nil { + c.Stdout = os.Stdout + } + if c.Stderr == nil { + c.Stderr = os.Stderr + } + if c.Stdin == nil { + c.Stdin = os.Stdin + } + + if c.InputDir != "" && c.OutputDir == "" && c.OutputMap == "" { + c.OutputDir = "." + } + if c.Input == "" && c.InputDir == "" && len(c.InputFiles) == 0 { + c.InputFiles = []string{"-"} + } + if c.OutputDir == "" && c.OutputMap == "" && len(c.OutputFiles) == 0 { + c.OutputFiles = []string{"-"} + } + if c.LDelim == "" { + c.LDelim = "{{" + } + if c.RDelim == "" { + c.RDelim = "}}" + } + if c.MissingKey == "" { + c.MissingKey = "error" + } + + if c.PluginTimeout == 0 { + c.PluginTimeout = 5 * time.Second + } +} + +// GetMode - parse an os.FileMode out of the string, and let us know if it's an override or not... +func (c *Config) GetMode() (os.FileMode, bool, error) { + modeOverride := c.OutMode != "" + m, err := strconv.ParseUint("0"+c.OutMode, 8, 32) + if err != nil { + return 0, false, err + } + mode := iohelpers.NormalizeFileMode(os.FileMode(m)) + if mode == 0 && c.Input != "" { + mode = iohelpers.NormalizeFileMode(0o644) + } + return mode, modeOverride, nil +} + +// String - +func (c *Config) String() string { + out := &strings.Builder{} + out.WriteString("---\n") + enc := yaml.NewEncoder(out) + enc.SetIndent(2) + + // dereferenced copy so we can truncate input for display + c2 := *c + if len(c2.Input) >= 11 { + c2.Input = c2.Input[0:8] + "..." 
+ } + + err := enc.Encode(c2) + if err != nil { + return err.Error() + } + return out.String() +} + +// -- + +func parseTemplateArg(value string) (alias string, ds DataSource, err error) { + alias, u, _ := strings.Cut(value, "=") + if u == "" { + u = alias + } + + ds.URL, err = urlhelpers.ParseSourceURL(u) + + return alias, ds, err +} diff --git a/config_test.go b/config_test.go new file mode 100644 index 00000000..de196459 --- /dev/null +++ b/config_test.go @@ -0,0 +1,891 @@ +package gomplate + +import ( + "net/http" + "net/url" + "runtime" + "strings" + "testing" + "time" + + "github.com/hairyhenderson/gomplate/v4/internal/iohelpers" + "github.com/hairyhenderson/yaml" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParseConfigFile(t *testing.T) { + t.Parallel() + in := "in: hello world\n" + expected := &Config{ + Input: "hello world", + } + cf, err := Parse(strings.NewReader(in)) + require.NoError(t, err) + assert.Equal(t, expected, cf) + + // legacy array form for templates (will be removed in v4.1.0 or so) + in = `in: hello world +templates: + - foo=bar + - baz=https://example.com/baz.yaml +` + + expected = &Config{ + Input: "hello world", + Templates: map[string]DataSource{ + "foo": {URL: mustURL("bar")}, + "baz": {URL: mustURL("https://example.com/baz.yaml")}, + }, + } + cf, err = Parse(strings.NewReader(in)) + require.NoError(t, err) + assert.Equal(t, expected, cf) + + in = `in: hello world +outputFiles: [out.txt] +chmod: 644 + +datasources: + data: + url: file:///data.json + moredata: + url: https://example.com/more.json + header: + Authorization: ["Bearer abcd1234"] + +context: + .: + url: file:///data.json + +plugins: + foo: + cmd: echo + pipe: true + +templates: + foo: + url: file:///tmp/foo.t + +pluginTimeout: 2s +` + expected = &Config{ + Input: "hello world", + OutputFiles: []string{"out.txt"}, + DataSources: map[string]DataSource{ + "data": { + URL: mustURL("file:///data.json"), + }, + "moredata": { + URL: mustURL("https://example.com/more.json"), + Header: map[string][]string{ + "Authorization": {"Bearer abcd1234"}, + }, + }, + }, + Context: map[string]DataSource{ + ".": { + URL: mustURL("file:///data.json"), + }, + }, + OutMode: "644", + Plugins: map[string]PluginConfig{ + "foo": {Cmd: "echo", Pipe: true}, + }, + Templates: map[string]DataSource{"foo": {URL: mustURL("file:///tmp/foo.t")}}, + PluginTimeout: 2 * time.Second, + } + + cf, err = Parse(strings.NewReader(in)) + require.NoError(t, err) + assert.EqualValues(t, expected, cf) +} + +func mustURL(s string) *url.URL { + u, err := url.Parse(s) + if err != nil { + panic(err) + } + + return u +} + +func TestValidate(t *testing.T) { + t.Parallel() + require.NoError(t, validateConfig("")) + + require.Error(t, validateConfig(`in: foo +inputFiles: [bar] +`)) + require.Error(t, validateConfig(`inputDir: foo +inputFiles: [bar] +`)) + require.Error(t, validateConfig(`inputDir: foo +in: bar +`)) + + require.Error(t, validateConfig(`outputDir: foo +outputFiles: [bar] +`)) + + require.Error(t, validateConfig(`in: foo +outputFiles: [bar, baz] +`)) + + require.Error(t, validateConfig(`inputFiles: [foo] +outputFiles: [bar, baz] +`)) + + require.Error(t, validateConfig(`outputDir: foo +outputFiles: [bar] +`)) + + require.Error(t, validateConfig(`outputDir: foo +`)) + + require.Error(t, validateConfig(`outputMap: foo +`)) + + require.Error(t, validateConfig(`outputMap: foo +outputFiles: [bar] +`)) + + require.Error(t, validateConfig(`inputDir: foo +outputDir: bar +outputMap: bar +`)) + + 
require.Error(t, validateConfig(`execPipe: true +`)) + require.Error(t, validateConfig(`execPipe: true +postExec: "" +`)) + + require.NoError(t, validateConfig(`execPipe: true +postExec: [echo, foo] +`)) + + require.Error(t, validateConfig(`execPipe: true +outputFiles: [foo] +postExec: [echo] +`)) + + require.NoError(t, validateConfig(`execPipe: true +inputFiles: ['-'] +postExec: [echo] +`)) + + require.Error(t, validateConfig(`inputDir: foo +execPipe: true +outputDir: foo +postExec: [echo] +`)) + + require.Error(t, validateConfig(`inputDir: foo +execPipe: true +outputMap: foo +postExec: [echo] +`)) +} + +func validateConfig(c string) error { + in := strings.NewReader(c) + cfg, err := Parse(in) + if err != nil { + return err + } + err = cfg.Validate() + return err +} + +func TestMergeFrom(t *testing.T) { + t.Parallel() + cfg := &Config{ + Input: "hello world", + DataSources: map[string]DataSource{ + "data": { + URL: mustURL("file:///data.json"), + }, + "moredata": { + URL: mustURL("https://example.com/more.json"), + Header: http.Header{ + "Authorization": {"Bearer abcd1234"}, + }, + }, + }, + Context: map[string]DataSource{ + "foo": { + URL: mustURL("https://example.com/foo.yaml"), + Header: http.Header{ + "Accept": {"application/yaml"}, + }, + }, + }, + OutMode: "644", + } + other := &Config{ + OutputFiles: []string{"out.txt"}, + DataSources: map[string]DataSource{ + "data": { + Header: http.Header{ + "Accept": {"foo/bar"}, + }, + }, + }, + Context: map[string]DataSource{ + "foo": { + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + "bar": {URL: mustURL("stdin:///")}, + }, + } + + expected := &Config{ + Input: "hello world", + OutputFiles: []string{"out.txt"}, + DataSources: map[string]DataSource{ + "data": { + URL: mustURL("file:///data.json"), + Header: http.Header{ + "Accept": {"foo/bar"}, + }, + }, + "moredata": { + URL: mustURL("https://example.com/more.json"), + Header: http.Header{ + "Authorization": {"Bearer abcd1234"}, + }, + }, + }, + Context: map[string]DataSource{ + "foo": { + URL: mustURL("https://example.com/foo.yaml"), + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + "bar": {URL: mustURL("stdin:///")}, + }, + OutMode: "644", + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + cfg = &Config{ + Input: "hello world", + } + other = &Config{ + InputFiles: []string{"in.tmpl", "in2.tmpl"}, + OutputFiles: []string{"out", "out2"}, + } + expected = &Config{ + InputFiles: []string{"in.tmpl", "in2.tmpl"}, + OutputFiles: []string{"out", "out2"}, + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + cfg = &Config{ + Input: "hello world", + OutputFiles: []string{"out", "out2"}, + } + other = &Config{ + InputDir: "in/", + OutputDir: "out/", + } + expected = &Config{ + InputDir: "in/", + OutputDir: "out/", + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + cfg = &Config{ + Input: "hello world", + OutputFiles: []string{"out"}, + } + other = &Config{ + Input: "hi", + ExecPipe: true, + PostExec: []string{"cat"}, + } + expected = &Config{ + Input: "hi", + ExecPipe: true, + PostExec: []string{"cat"}, + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + cfg = &Config{ + Input: "hello world", + OutputFiles: []string{"-"}, + Plugins: map[string]PluginConfig{ + "sleep": {Cmd: "echo"}, + }, + PluginTimeout: 500 * time.Microsecond, + } + other = &Config{ + InputFiles: []string{"-"}, + OutputFiles: []string{"-"}, + Plugins: map[string]PluginConfig{ + "sleep": {Cmd: "sleep.sh"}, + }, + } + expected = &Config{ + 
Input: "hello world", + OutputFiles: []string{"-"}, + Plugins: map[string]PluginConfig{ + "sleep": {Cmd: "sleep.sh"}, + }, + PluginTimeout: 500 * time.Microsecond, + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + cfg = &Config{ + Input: "hello world", + OutMode: "644", + } + other = &Config{ + OutputFiles: []string{"out.txt"}, + Context: map[string]DataSource{ + "foo": { + URL: mustURL("https://example.com/foo.yaml"), + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + "bar": {URL: mustURL("stdin:///")}, + }, + DataSources: map[string]DataSource{ + "data": { + URL: mustURL("file:///data.json"), + }, + "moredata": { + URL: mustURL("https://example.com/more.json"), + Header: http.Header{ + "Authorization": {"Bearer abcd1234"}, + }, + }, + }, + } + expected = &Config{ + Input: "hello world", + OutputFiles: []string{"out.txt"}, + Context: map[string]DataSource{ + "foo": { + URL: mustURL("https://example.com/foo.yaml"), + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + "bar": {URL: mustURL("stdin:///")}, + }, + DataSources: map[string]DataSource{ + "data": { + URL: mustURL("file:///data.json"), + }, + "moredata": { + URL: mustURL("https://example.com/more.json"), + Header: http.Header{ + "Authorization": {"Bearer abcd1234"}, + }, + }, + }, + OutMode: "644", + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) + + // test template merging & a few other things + cfg = &Config{ + InputDir: "indir/", + ExcludeGlob: []string{"*.txt"}, + Templates: map[string]DataSource{ + "foo": { + URL: mustURL("file:///foo.yaml"), + }, + "bar": { + URL: mustURL("stdin:///"), + Header: http.Header{"Accept": {"application/json"}}, + }, + }, + } + other = &Config{ + ExcludeGlob: []string{"*.yaml"}, + OutputMap: "${ .in }.out", + OutMode: "600", + LDelim: "${", + RDelim: "}", + Templates: map[string]DataSource{ + "foo": {URL: mustURL("https://example.com/foo.yaml")}, + "baz": {URL: mustURL("vault:///baz")}, + }, + } + expected = &Config{ + InputDir: "indir/", + ExcludeGlob: []string{"*.yaml"}, + OutputMap: "${ .in }.out", + OutMode: "600", + LDelim: "${", + RDelim: "}", + Templates: map[string]DataSource{ + "foo": {URL: mustURL("https://example.com/foo.yaml")}, + "bar": { + URL: mustURL("stdin:///"), + Header: http.Header{"Accept": {"application/json"}}, + }, + "baz": {URL: mustURL("vault:///baz")}, + }, + } + + assert.EqualValues(t, expected, cfg.MergeFrom(other)) +} + +func TestParseDataSourceFlags(t *testing.T) { + t.Parallel() + cfg := &Config{} + err := cfg.ParseDataSourceFlags(nil, nil, nil, nil) + require.NoError(t, err) + assert.EqualValues(t, &Config{}, cfg) + + cfg = &Config{} + err = cfg.ParseDataSourceFlags([]string{"foo/bar/baz.json"}, nil, nil, nil) + require.Error(t, err) + + cfg = &Config{} + err = cfg.ParseDataSourceFlags([]string{"baz=foo/bar/baz.json"}, nil, nil, nil) + require.NoError(t, err) + expected := &Config{ + DataSources: map[string]DataSource{ + "baz": {URL: mustURL("foo/bar/baz.json")}, + }, + } + assert.EqualValues(t, expected, cfg, "expected: %+v\nactual: %+v\n", expected, cfg) + + cfg = &Config{} + err = cfg.ParseDataSourceFlags( + []string{"baz=foo/bar/baz.json"}, + nil, + nil, + []string{"baz=Accept: application/json"}) + require.NoError(t, err) + assert.EqualValues(t, &Config{ + DataSources: map[string]DataSource{ + "baz": { + URL: mustURL("foo/bar/baz.json"), + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + }, + }, cfg) + + cfg = &Config{} + err = cfg.ParseDataSourceFlags( + 
[]string{"baz=foo/bar/baz.json"}, + []string{"foo=http://example.com"}, + nil, + []string{ + "foo=Accept: application/json", + "bar=Authorization: Basic xxxxx", + }, + ) + require.NoError(t, err) + assert.EqualValues(t, &Config{ + DataSources: map[string]DataSource{ + "baz": {URL: mustURL("foo/bar/baz.json")}, + }, + Context: map[string]DataSource{ + "foo": { + URL: mustURL("http://example.com"), + Header: http.Header{ + "Accept": {"application/json"}, + }, + }, + }, + ExtraHeaders: map[string]http.Header{ + "bar": {"Authorization": {"Basic xxxxx"}}, + }, + }, cfg) + + cfg = &Config{} + err = cfg.ParseDataSourceFlags( + nil, + nil, + []string{"foo=http://example.com", "file.tmpl", "tmpldir/"}, + []string{"foo=Accept: application/json", "bar=Authorization: Basic xxxxx"}, + ) + require.NoError(t, err) + assert.EqualValues(t, &Config{ + Templates: map[string]DataSource{ + "foo": { + URL: mustURL("http://example.com"), + Header: http.Header{"Accept": {"application/json"}}, + }, + "file.tmpl": {URL: mustURL("file.tmpl")}, + "tmpldir/": {URL: mustURL("tmpldir/")}, + }, + ExtraHeaders: map[string]http.Header{ + "bar": {"Authorization": {"Basic xxxxx"}}, + }, + }, cfg) +} + +func TestParsePluginFlags(t *testing.T) { + t.Parallel() + cfg := &Config{} + err := cfg.ParsePluginFlags(nil) + require.NoError(t, err) + + cfg = &Config{} + err = cfg.ParsePluginFlags([]string{"foo=bar"}) + require.NoError(t, err) + assert.EqualValues(t, &Config{Plugins: map[string]PluginConfig{"foo": {Cmd: "bar"}}}, cfg) +} + +func TestConfig_String(t *testing.T) { + t.Run("defaults", func(t *testing.T) { + c := &Config{} + c.ApplyDefaults() + + expected := `--- +inputFiles: ['-'] +outputFiles: ['-'] +leftDelim: '{{' +rightDelim: '}}' +missingKey: error +pluginTimeout: 5s +` + assert.Equal(t, expected, c.String()) + }) + + t.Run("overridden values", func(t *testing.T) { + c := &Config{ + LDelim: "L", + RDelim: "R", + Input: "foo", + OutputFiles: []string{"-"}, + Templates: map[string]DataSource{ + "foo": {URL: mustURL("https://www.example.com/foo.tmpl")}, + "bar": {URL: mustURL("file:///tmp/bar.t")}, + }, + } + expected := `--- +in: foo +outputFiles: ['-'] +leftDelim: L +rightDelim: R +templates: + foo: + url: https://www.example.com/foo.tmpl + bar: + url: file:///tmp/bar.t +` + assert.YAMLEq(t, expected, c.String()) + }) + + t.Run("long input", func(t *testing.T) { + c := &Config{ + LDelim: "L", + RDelim: "R", + Input: "long input that should be truncated", + OutputFiles: []string{"-"}, + Templates: map[string]DataSource{ + "foo": {URL: mustURL("https://www.example.com/foo.tmpl")}, + "bar": {URL: mustURL("file:///tmp/bar.t")}, + }, + } + expected := `--- +in: long inp... 
+outputFiles: ['-'] +leftDelim: L +rightDelim: R +templates: + foo: + url: https://www.example.com/foo.tmpl + bar: + url: file:///tmp/bar.t +` + assert.YAMLEq(t, expected, c.String()) + }) + + t.Run("relative dirs", func(t *testing.T) { + c := &Config{ + InputDir: "in/", + OutputDir: "out/", + } + expected := `--- +inputDir: in/ +outputDir: out/ +` + assert.YAMLEq(t, expected, c.String()) + }) + + t.Run("outputmap", func(t *testing.T) { + c := &Config{ + InputDir: "in/", + OutputMap: "{{ .in }}", + } + expected := `--- +inputDir: in/ +outputMap: '{{ .in }}' +` + + assert.YAMLEq(t, expected, c.String()) + }) + + t.Run("pluginTimeout", func(t *testing.T) { + c := &Config{ + PluginTimeout: 500 * time.Millisecond, + } + expected := `--- +pluginTimeout: 500ms +` + + assert.YAMLEq(t, expected, c.String()) + }) + + t.Run("plugins", func(t *testing.T) { + c := &Config{ + Plugins: map[string]PluginConfig{ + "foo": { + Cmd: "bar", + Timeout: 1 * time.Second, + Pipe: true, + }, + }, + } + expected := `--- +plugins: + foo: + cmd: bar + timeout: 1s + pipe: true +` + + assert.YAMLEq(t, expected, c.String()) + }) +} + +func TestApplyDefaults(t *testing.T) { + t.Parallel() + cfg := &Config{} + + cfg.ApplyDefaults() + assert.EqualValues(t, []string{"-"}, cfg.InputFiles) + assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) + assert.Empty(t, cfg.OutputDir) + assert.Equal(t, "{{", cfg.LDelim) + assert.Equal(t, "}}", cfg.RDelim) + + cfg = &Config{ + InputDir: "in", + } + + cfg.ApplyDefaults() + assert.Empty(t, cfg.InputFiles) + assert.Empty(t, cfg.OutputFiles) + assert.Equal(t, ".", cfg.OutputDir) + assert.Equal(t, "{{", cfg.LDelim) + assert.Equal(t, "}}", cfg.RDelim) + + cfg = &Config{ + Input: "foo", + LDelim: "<", + RDelim: ">", + } + + cfg.ApplyDefaults() + assert.Empty(t, cfg.InputFiles) + assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) + assert.Empty(t, cfg.OutputDir) + assert.Equal(t, "<", cfg.LDelim) + assert.Equal(t, ">", cfg.RDelim) + + cfg = &Config{ + Input: "foo", + ExecPipe: true, + } + + cfg.ApplyDefaults() + assert.Empty(t, cfg.InputFiles) + assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) + assert.Empty(t, cfg.OutputDir) + assert.True(t, cfg.ExecPipe) + + cfg = &Config{ + InputDir: "foo", + OutputMap: "bar", + } + + cfg.ApplyDefaults() + assert.Empty(t, cfg.InputFiles) + assert.Empty(t, cfg.Input) + assert.Empty(t, cfg.OutputFiles) + assert.Empty(t, cfg.OutputDir) + assert.False(t, cfg.ExecPipe) + assert.Equal(t, "bar", cfg.OutputMap) +} + +func TestGetMode(t *testing.T) { + c := &Config{} + m, o, err := c.GetMode() + require.NoError(t, err) + assert.Equal(t, iohelpers.NormalizeFileMode(0), m) + assert.False(t, o) + + c = &Config{OutMode: "755"} + m, o, err = c.GetMode() + require.NoError(t, err) + assert.Equal(t, iohelpers.NormalizeFileMode(0o755), m) + assert.True(t, o) + + c = &Config{OutMode: "0755"} + m, o, err = c.GetMode() + require.NoError(t, err) + assert.Equal(t, iohelpers.NormalizeFileMode(0o755), m) + assert.True(t, o) + + c = &Config{OutMode: "foo"} + _, _, err = c.GetMode() + require.Error(t, err) +} + +func TestParseHeaderArgs(t *testing.T) { + args := []string{ + "foo=Accept: application/json", + "bar=Authorization: Bearer supersecret", + } + expected := map[string]http.Header{ + "foo": { + "Accept": {"application/json"}, + }, + "bar": { + "Authorization": {"Bearer supersecret"}, + }, + } + parsed, err := parseHeaderArgs(args) + require.NoError(t, err) + assert.Equal(t, expected, parsed) + + _, err = parseHeaderArgs([]string{"foo"}) + require.Error(t, err) + + _, 
err = parseHeaderArgs([]string{"foo=bar"}) + require.Error(t, err) + + args = []string{ + "foo=Accept: application/json", + "foo=Foo: bar", + "foo=foo: baz", + "foo=fOO: qux", + "bar=Authorization: Bearer supersecret", + } + expected = map[string]http.Header{ + "foo": { + "Accept": {"application/json"}, + "Foo": {"bar", "baz", "qux"}, + }, + "bar": { + "Authorization": {"Bearer supersecret"}, + }, + } + parsed, err = parseHeaderArgs(args) + require.NoError(t, err) + assert.Equal(t, expected, parsed) +} + +func TestParseDatasourceArgNoAlias(t *testing.T) { + alias, ds, err := parseDatasourceArg("foo.json") + require.NoError(t, err) + assert.Equal(t, "foo", alias) + assert.Empty(t, ds.URL.Scheme) + + _, _, err = parseDatasourceArg("../foo.json") + require.Error(t, err) + + _, _, err = parseDatasourceArg("ftp://example.com/foo.yml") + require.Error(t, err) +} + +func TestParseDatasourceArgWithAlias(t *testing.T) { + alias, ds, err := parseDatasourceArg("data=foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Path: "foo.json"}, ds.URL) + + alias, ds, err = parseDatasourceArg("data=/otherdir/foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.Equal(t, "file", ds.URL.Scheme) + assert.True(t, ds.URL.IsAbs()) + assert.Equal(t, "/otherdir/foo.json", ds.URL.Path) + + if runtime.GOOS == "windows" { + alias, ds, err = parseDatasourceArg("data=foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Path: "foo.json"}, ds.URL) + + alias, ds, err = parseDatasourceArg(`data=\otherdir\foo.json`) + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Scheme: "file", Path: "/otherdir/foo.json"}, ds.URL) + + alias, ds, err = parseDatasourceArg("data=C:\\windowsdir\\foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Scheme: "file", Path: "C:/windowsdir/foo.json"}, ds.URL) + + alias, ds, err = parseDatasourceArg("data=\\\\somehost\\share\\foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Scheme: "file", Host: "somehost", Path: "/share/foo.json"}, ds.URL) + } + + alias, ds, err = parseDatasourceArg("data=sftp://example.com/blahblah/foo.json") + require.NoError(t, err) + assert.Equal(t, "data", alias) + assert.EqualValues(t, &url.URL{Scheme: "sftp", Host: "example.com", Path: "/blahblah/foo.json"}, ds.URL) + + alias, ds, err = parseDatasourceArg("merged=merge:./foo.yaml|http://example.com/bar.json%3Ffoo=bar") + require.NoError(t, err) + assert.Equal(t, "merged", alias) + assert.EqualValues(t, &url.URL{Scheme: "merge", Opaque: "./foo.yaml|http://example.com/bar.json%3Ffoo=bar"}, ds.URL) +} + +func TestPluginConfig_UnmarshalYAML(t *testing.T) { + in := `foo` + out := PluginConfig{} + err := yaml.Unmarshal([]byte(in), &out) + require.NoError(t, err) + assert.EqualValues(t, PluginConfig{Cmd: "foo"}, out) + + in = `[foo, bar]` + out = PluginConfig{} + err = yaml.Unmarshal([]byte(in), &out) + require.Error(t, err) + + in = `cmd: foo` + out = PluginConfig{} + err = yaml.Unmarshal([]byte(in), &out) + require.NoError(t, err) + assert.EqualValues(t, PluginConfig{Cmd: "foo"}, out) + + in = `cmd: foo +timeout: 10ms +pipe: true +` + out = PluginConfig{} + err = yaml.Unmarshal([]byte(in), &out) + require.NoError(t, err) + assert.EqualValues(t, PluginConfig{ + Cmd: "foo", + Timeout: time.Duration(10) * time.Millisecond, + Pipe: true, + }, out) +} diff --git 
a/context_test.go b/context_test.go index 903a0fc7..a0985be9 100644 --- a/context_test.go +++ b/context_test.go @@ -7,7 +7,6 @@ import ( "testing" "github.com/hairyhenderson/go-fsimpl" - "github.com/hairyhenderson/gomplate/v4/internal/config" "github.com/hairyhenderson/gomplate/v4/internal/datafs" "github.com/stretchr/testify/assert" @@ -46,8 +45,8 @@ func TestCreateContext(t *testing.T) { uf, _ := url.Parse(fooURL) ub, _ := url.Parse(barURL) - reg.Register("foo", config.DataSource{URL: uf}) - reg.Register(".", config.DataSource{URL: ub}) + reg.Register("foo", DataSource{URL: uf}) + reg.Register(".", DataSource{URL: ub}) t.Setenv("foo", "foo: bar") c, err = createTmplContext(ctx, []string{"foo"}, sr) diff --git a/gomplate.go b/gomplate.go index b53a2629..586f0575 100644 --- a/gomplate.go +++ b/gomplate.go @@ -6,17 +6,17 @@ import ( "bytes" "context" "fmt" + "log/slog" "path/filepath" "strings" "text/template" "time" - "github.com/hairyhenderson/gomplate/v4/internal/config" "github.com/hairyhenderson/gomplate/v4/internal/datafs" ) // Run all gomplate templates specified by the given configuration -func Run(ctx context.Context, cfg *config.Config) error { +func Run(ctx context.Context, cfg *Config) error { Metrics = newMetrics() // apply defaults before validation @@ -27,6 +27,14 @@ func Run(ctx context.Context, cfg *config.Config) error { return fmt.Errorf("failed to validate config: %w\n%+v", err, cfg) } + if cfg.Experimental { + slog.SetDefault(slog.With("experimental", true)) + slog.InfoContext(ctx, "experimental functions and features enabled!") + + ctx = SetExperimental(ctx) + } + + // bind plugins from the configuration to the funcMap funcMap := template.FuncMap{} err = bindPlugins(ctx, cfg, funcMap) if err != nil { @@ -36,14 +44,26 @@ func Run(ctx context.Context, cfg *config.Config) error { // if a custom Stdin is set in the config, inject it into the context now ctx = datafs.ContextWithStdin(ctx, cfg.Stdin) + // if a custom FSProvider is set in the context, use it, otherwise inject + // the default now - one is needed for the calls below to gatherTemplates + // as well as the rendering itself + if datafs.FSProviderFromContext(ctx) == nil { + ctx = datafs.ContextWithFSProvider(ctx, DefaultFSProvider) + } + + // extract the rendering options from the config opts := optionsFromConfig(cfg) opts.Funcs = funcMap tr := newRenderer(opts) start := time.Now() + // figure out how to name output files (only relevant if we're dealing with an InputDir) namer := chooseNamer(cfg, tr) + + // prepare to render templates (read them in, open output writers, etc) tmpl, err := gatherTemplates(ctx, cfg, namer) + Metrics.GatherDuration = time.Since(start) if err != nil { Metrics.Errors++ @@ -59,22 +79,33 @@ func Run(ctx context.Context, cfg *config.Config) error { return nil } -func chooseNamer(cfg *config.Config, tr *renderer) func(context.Context, string) (string, error) { +type outputNamer interface { + // Name the output file for the given input path + Name(ctx context.Context, inPath string) (string, error) +} + +type outputNamerFunc func(context.Context, string) (string, error) + +func (f outputNamerFunc) Name(ctx context.Context, inPath string) (string, error) { + return f(ctx, inPath) +} + +func chooseNamer(cfg *Config, tr *renderer) outputNamer { if cfg.OutputMap == "" { return simpleNamer(cfg.OutputDir) } return mappingNamer(cfg.OutputMap, tr) } -func simpleNamer(outDir string) func(ctx context.Context, inPath string) (string, error) { - return func(_ context.Context, inPath string) (string, 
error) { +func simpleNamer(outDir string) outputNamer { + return outputNamerFunc(func(_ context.Context, inPath string) (string, error) { outPath := filepath.Join(outDir, inPath) return filepath.Clean(outPath), nil - } + }) } -func mappingNamer(outMap string, tr *renderer) func(context.Context, string) (string, error) { - return func(ctx context.Context, inPath string) (string, error) { +func mappingNamer(outMap string, tr *renderer) outputNamer { + return outputNamerFunc(func(ctx context.Context, inPath string) (string, error) { tcontext, err := createTmplContext(ctx, tr.tctxAliases, tr.sr) if err != nil { return "", err @@ -103,5 +134,5 @@ func mappingNamer(outMap string, tr *renderer) func(context.Context, string) (st } return filepath.Clean(strings.TrimSpace(out.String())), nil - } + }) } diff --git a/gomplate_test.go b/gomplate_test.go index 2e86c4fe..5b480dc9 100644 --- a/gomplate_test.go +++ b/gomplate_test.go @@ -29,7 +29,7 @@ func testTemplate(t *testing.T, tr *renderer, tmpl string) string { } func TestGetenvTemplates(t *testing.T) { - tr := newRenderer(Options{ + tr := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "getenv": env.Getenv, "bool": conv.ToBool, @@ -41,7 +41,7 @@ func TestGetenvTemplates(t *testing.T) { } func TestBoolTemplates(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "bool": conv.ToBool, }, @@ -55,7 +55,7 @@ func TestBoolTemplates(t *testing.T) { func TestEc2MetaTemplates(t *testing.T) { createGomplate := func(data map[string]string, region string) *renderer { ec2meta := aws.MockEC2Meta(data, nil, region) - return newRenderer(Options{Funcs: template.FuncMap{"ec2meta": ec2meta.Meta}}) + return newRenderer(RenderOptions{Funcs: template.FuncMap{"ec2meta": ec2meta.Meta}}) } g := createGomplate(nil, "") @@ -70,7 +70,7 @@ func TestEc2MetaTemplates(t *testing.T) { func TestEc2MetaTemplates_WithJSON(t *testing.T) { ec2meta := aws.MockEC2Meta(map[string]string{"obj": `"foo": "bar"`}, map[string]string{"obj": `"foo": "baz"`}, "") - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "ec2meta": ec2meta.Meta, "ec2dynamic": ec2meta.Dynamic, @@ -83,7 +83,7 @@ func TestEc2MetaTemplates_WithJSON(t *testing.T) { } func TestJSONArrayTemplates(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "jsonArray": parsers.JSONArray, }, @@ -94,7 +94,7 @@ func TestJSONArrayTemplates(t *testing.T) { } func TestYAMLTemplates(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "yaml": parsers.YAML, "yamlArray": parsers.YAMLArray, @@ -107,7 +107,7 @@ func TestYAMLTemplates(t *testing.T) { } func TestHasTemplate(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ Funcs: template.FuncMap{ "yaml": parsers.YAML, "has": conv.Has, @@ -141,7 +141,7 @@ func TestMissingKey(t *testing.T) { } for name, tt := range tests { t.Run(name, func(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ MissingKey: tt.MissingKey, }) tmpl := `{{ .name }}` @@ -151,7 +151,7 @@ func TestMissingKey(t *testing.T) { } func TestCustomDelim(t *testing.T) { - g := newRenderer(Options{ + g := newRenderer(RenderOptions{ LDelim: "[", RDelim: "]", }) @@ -160,7 +160,7 @@ func TestCustomDelim(t *testing.T) { func TestSimpleNamer(t *testing.T) { n := simpleNamer("out/") - out, err := n(context.Background(), "file") + out, err := n.Name(context.Background(), "file") require.NoError(t, err) 
expected := filepath.FromSlash("out/file") assert.Equal(t, expected, out) @@ -176,13 +176,13 @@ func TestMappingNamer(t *testing.T) { }, } n := mappingNamer("out/{{ .in }}", tr) - out, err := n(ctx, "file") + out, err := n.Name(ctx, "file") require.NoError(t, err) expected := filepath.FromSlash("out/file") assert.Equal(t, expected, out) n = mappingNamer("out/{{ foo }}{{ .in }}", tr) - out, err = n(ctx, "file") + out, err = n.Name(ctx, "file") require.NoError(t, err) expected = filepath.FromSlash("out/foofile") assert.Equal(t, expected, out) diff --git a/internal/cmd/config.go b/internal/cmd/config.go index fc64f8e4..431139a1 100644 --- a/internal/cmd/config.go +++ b/internal/cmd/config.go @@ -1,14 +1,17 @@ package cmd import ( + "bytes" "context" "fmt" + "io" "log/slog" + "os" "time" + "github.com/hairyhenderson/gomplate/v4" "github.com/hairyhenderson/gomplate/v4/conv" "github.com/hairyhenderson/gomplate/v4/env" - "github.com/hairyhenderson/gomplate/v4/internal/config" "github.com/hairyhenderson/gomplate/v4/internal/datafs" "github.com/spf13/cobra" @@ -19,10 +22,10 @@ const ( ) // loadConfig is intended to be called before command execution. It: -// - creates a config.Config from the cobra flags -// - creates a config.Config from the config file (if present) +// - creates a gomplate.Config from the cobra flags +// - creates a gomplate.Config from the config file (if present) // - merges the two (flags take precedence) -func loadConfig(ctx context.Context, cmd *cobra.Command, args []string) (*config.Config, error) { +func loadConfig(ctx context.Context, cmd *cobra.Command, args []string) (*gomplate.Config, error) { flagConfig, err := cobraConfig(cmd, args) if err != nil { return nil, err @@ -64,7 +67,7 @@ func pickConfigFile(cmd *cobra.Command) (cfgFile string, required bool) { return cfgFile, required } -func readConfigFile(ctx context.Context, cmd *cobra.Command) (cfg *config.Config, err error) { +func readConfigFile(ctx context.Context, cmd *cobra.Command) (*gomplate.Config, error) { cfgFile, configRequired := pickConfigFile(cmd) // we only support loading configs from the local filesystem for now @@ -76,14 +79,14 @@ func readConfigFile(ctx context.Context, cmd *cobra.Command) (cfg *config.Config f, err := fsys.Open(cfgFile) if err != nil { if configRequired { - return cfg, fmt.Errorf("config file requested, but couldn't be opened: %w", err) + return nil, fmt.Errorf("config file requested, but couldn't be opened: %w", err) } return nil, nil } - cfg, err = config.Parse(f) + cfg, err := gomplate.Parse(f) if err != nil { - return cfg, fmt.Errorf("parsing config file %q: %w", cfgFile, err) + return nil, fmt.Errorf("parsing config file %q: %w", cfgFile, err) } slog.DebugContext(ctx, "using config file", "cfgFile", cfgFile) @@ -92,8 +95,8 @@ func readConfigFile(ctx context.Context, cmd *cobra.Command) (cfg *config.Config } // cobraConfig - initialize a config from the commandline options -func cobraConfig(cmd *cobra.Command, args []string) (cfg *config.Config, err error) { - cfg = &config.Config{} +func cobraConfig(cmd *cobra.Command, args []string) (cfg *gomplate.Config, err error) { + cfg = &gomplate.Config{} cfg.InputFiles, err = getStringSlice(cmd, "file") if err != nil { return nil, err @@ -241,7 +244,7 @@ func processIncludes(includes, excludes []string) []string { return out } -func applyEnvVars(_ context.Context, cfg *config.Config) (*config.Config, error) { +func applyEnvVars(_ context.Context, cfg *gomplate.Config) (*gomplate.Config, error) { if to := 
env.Getenv("GOMPLATE_PLUGIN_TIMEOUT"); cfg.PluginTimeout == 0 && to != "" { t, err := time.ParseDuration(to) if err != nil { @@ -263,3 +266,24 @@ func applyEnvVars(_ context.Context, cfg *config.Config) (*config.Config, error) return cfg, nil } + +// postExecInput - return the input to be used after the post-exec command. The +// input config may be modified if ExecPipe is set (OutputFiles is set to "-"), +// and Stdout is redirected to a pipe. +func postExecInput(cfg *gomplate.Config) io.Reader { + if cfg.ExecPipe { + pipe := &bytes.Buffer{} + cfg.OutputFiles = []string{"-"} + + // --exec-pipe redirects standard out to the out pipe + cfg.Stdout = pipe + + return pipe + } + + if cfg.Stdin != nil { + return cfg.Stdin + } + + return os.Stdin +} diff --git a/internal/cmd/config_test.go b/internal/cmd/config_test.go index 1c9adfe5..56d7b1d5 100644 --- a/internal/cmd/config_test.go +++ b/internal/cmd/config_test.go @@ -6,12 +6,13 @@ import ( "fmt" "io/fs" "net/url" + "os" "testing" "testing/fstest" "time" "github.com/hairyhenderson/go-fsimpl" - "github.com/hairyhenderson/gomplate/v4/internal/config" + "github.com/hairyhenderson/gomplate/v4" "github.com/hairyhenderson/gomplate/v4/internal/datafs" "github.com/spf13/cobra" @@ -47,7 +48,7 @@ func TestReadConfigFile(t *testing.T) { cfg, err := readConfigFile(ctx, cmd) require.NoError(t, err) - assert.EqualValues(t, &config.Config{}, cfg) + assert.EqualValues(t, &gomplate.Config{}, cfg) cmd.ParseFlags([]string{"--config", "config.yaml"}) @@ -55,7 +56,7 @@ func TestReadConfigFile(t *testing.T) { cfg, err = readConfigFile(ctx, cmd) require.NoError(t, err) - assert.EqualValues(t, &config.Config{Input: "hello world"}, cfg) + assert.EqualValues(t, &gomplate.Config{Input: "hello world"}, cfg) fsys["config.yaml"] = &fstest.MapFile{Data: []byte("in: hello world\nin: \n")} @@ -87,7 +88,7 @@ func TestLoadConfig(t *testing.T) { cmd.ParseFlags(nil) out, err := loadConfig(ctx, cmd, cmd.Flags().Args()) - expected := &config.Config{ + expected := &gomplate.Config{ Stdin: stdin, Stdout: stdout, Stderr: stderr, @@ -97,7 +98,7 @@ func TestLoadConfig(t *testing.T) { cmd.ParseFlags([]string{"--in", "foo"}) out, err = loadConfig(ctx, cmd, cmd.Flags().Args()) - expected = &config.Config{ + expected = &gomplate.Config{ Input: "foo", Stdin: stdin, Stdout: out.Stdout, @@ -108,19 +109,37 @@ func TestLoadConfig(t *testing.T) { cmd.ParseFlags([]string{"--in", "foo", "--exec-pipe", "--", "tr", "[a-z]", "[A-Z]"}) out, err = loadConfig(ctx, cmd, cmd.Flags().Args()) - expected = &config.Config{ - Input: "foo", - ExecPipe: true, - PostExec: []string{"tr", "[a-z]", "[A-Z]"}, - PostExecInput: out.PostExecInput, - Stdin: stdin, - Stdout: out.Stdout, - Stderr: stderr, + expected = &gomplate.Config{ + Input: "foo", + ExecPipe: true, + PostExec: []string{"tr", "[a-z]", "[A-Z]"}, + Stdin: stdin, + Stdout: out.Stdout, + Stderr: stderr, } require.NoError(t, err) assert.EqualValues(t, expected, out) } +func TestPostExecInput(t *testing.T) { + t.Parallel() + + cfg := &gomplate.Config{ExecPipe: false} + assert.Equal(t, os.Stdin, postExecInput(cfg)) + + cfg = &gomplate.Config{ExecPipe: true} + + pipe := postExecInput(cfg) + assert.IsType(t, &bytes.Buffer{}, pipe) + assert.Equal(t, []string{"-"}, cfg.OutputFiles) + assert.Equal(t, pipe, cfg.Stdout) + + stdin := &bytes.Buffer{} + cfg = &gomplate.Config{ExecPipe: false, Stdin: stdin} + pipe = postExecInput(cfg) + assert.Equal(t, stdin, pipe) +} + func TestCobraConfig(t *testing.T) { t.Parallel() cmd := &cobra.Command{} @@ -133,13 +152,13 @@ func 
TestCobraConfig(t *testing.T) { cfg, err := cobraConfig(cmd, cmd.Flags().Args()) require.NoError(t, err) - assert.EqualValues(t, &config.Config{}, cfg) + assert.EqualValues(t, &gomplate.Config{}, cfg) cmd.ParseFlags([]string{"--file", "in", "--", "echo", "foo"}) cfg, err = cobraConfig(cmd, cmd.Flags().Args()) require.NoError(t, err) - assert.EqualValues(t, &config.Config{ + assert.EqualValues(t, &gomplate.Config{ InputFiles: []string{"in"}, PostExec: []string{"echo", "foo"}, }, cfg) @@ -195,68 +214,68 @@ func TestPickConfigFile(t *testing.T) { func TestApplyEnvVars(t *testing.T) { t.Run("invalid GOMPLATE_PLUGIN_TIMEOUT", func(t *testing.T) { t.Setenv("GOMPLATE_PLUGIN_TIMEOUT", "bogus") - _, err := applyEnvVars(context.Background(), &config.Config{}) + _, err := applyEnvVars(context.Background(), &gomplate.Config{}) require.Error(t, err) }) data := []struct { - input, expected *config.Config + input, expected *gomplate.Config env string value string }{ { - &config.Config{PluginTimeout: 2 * time.Second}, - &config.Config{PluginTimeout: 2 * time.Second}, + &gomplate.Config{PluginTimeout: 2 * time.Second}, + &gomplate.Config{PluginTimeout: 2 * time.Second}, "GOMPLATE_PLUGIN_TIMEOUT", "bogus", }, { - &config.Config{}, - &config.Config{PluginTimeout: 2 * time.Second}, + &gomplate.Config{}, + &gomplate.Config{PluginTimeout: 2 * time.Second}, "GOMPLATE_PLUGIN_TIMEOUT", "2s", }, { - &config.Config{PluginTimeout: 100 * time.Millisecond}, - &config.Config{PluginTimeout: 100 * time.Millisecond}, + &gomplate.Config{PluginTimeout: 100 * time.Millisecond}, + &gomplate.Config{PluginTimeout: 100 * time.Millisecond}, "GOMPLATE_PLUGIN_TIMEOUT", "2s", }, { - &config.Config{}, - &config.Config{Experimental: false}, + &gomplate.Config{}, + &gomplate.Config{Experimental: false}, "GOMPLATE_EXPERIMENTAL", "bogus", }, { - &config.Config{}, - &config.Config{Experimental: true}, + &gomplate.Config{}, + &gomplate.Config{Experimental: true}, "GOMPLATE_EXPERIMENTAL", "true", }, { - &config.Config{Experimental: true}, - &config.Config{Experimental: true}, + &gomplate.Config{Experimental: true}, + &gomplate.Config{Experimental: true}, "GOMPLATE_EXPERIMENTAL", "false", }, { - &config.Config{}, - &config.Config{LDelim: "--"}, + &gomplate.Config{}, + &gomplate.Config{LDelim: "--"}, "GOMPLATE_LEFT_DELIM", "--", }, { - &config.Config{LDelim: "{{"}, - &config.Config{LDelim: "{{"}, + &gomplate.Config{LDelim: "{{"}, + &gomplate.Config{LDelim: "{{"}, "GOMPLATE_LEFT_DELIM", "--", }, { - &config.Config{}, - &config.Config{RDelim: ")>"}, + &gomplate.Config{}, + &gomplate.Config{RDelim: ")>"}, "GOMPLATE_RIGHT_DELIM", ")>", }, { - &config.Config{RDelim: "}}"}, - &config.Config{RDelim: "}}"}, + &gomplate.Config{RDelim: "}}"}, + &gomplate.Config{RDelim: "}}"}, "GOMPLATE_RIGHT_DELIM", ")>", }, { - &config.Config{RDelim: "}}"}, - &config.Config{RDelim: "}}"}, + &gomplate.Config{RDelim: "}}"}, + &gomplate.Config{RDelim: "}}"}, "GOMPLATE_RIGHT_DELIM", "", }, } diff --git a/internal/cmd/main.go b/internal/cmd/main.go index 22388521..fa7c97c3 100644 --- a/internal/cmd/main.go +++ b/internal/cmd/main.go @@ -88,12 +88,8 @@ func NewGomplateCmd(stderr io.Writer) *cobra.Command { return err } - if cfg.Experimental { - slog.SetDefault(slog.With("experimental", true)) - slog.InfoContext(ctx, "experimental functions and features enabled!") - - ctx = gomplate.SetExperimental(ctx) - } + // get the post-exec reader now as this may modify cfg + postExecReader := postExecInput(cfg) slog.DebugContext(ctx, fmt.Sprintf("starting %s", cmd.Name())) 
slog.DebugContext(ctx, fmt.Sprintf("config is:\n%v", cfg), @@ -101,6 +97,7 @@ func NewGomplateCmd(stderr io.Writer) *cobra.Command { slog.String("build", version.GitCommit), ) + // run the main command err = gomplate.Run(ctx, cfg) cmd.SilenceErrors = true cmd.SilenceUsage = true @@ -113,7 +110,8 @@ func NewGomplateCmd(stderr io.Writer) *cobra.Command { if err != nil { return err } - return postRunExec(ctx, cfg.PostExec, cfg.PostExecInput, cmd.OutOrStdout(), cmd.ErrOrStderr()) + + return postRunExec(ctx, cfg.PostExec, postExecReader, cmd.OutOrStdout(), cmd.ErrOrStderr()) }, Args: optionalExecArgs, } diff --git a/internal/config/configfile.go b/internal/config/configfile.go deleted file mode 100644 index d255a36c..00000000 --- a/internal/config/configfile.go +++ /dev/null @@ -1,602 +0,0 @@ -package config - -import ( - "bytes" - "context" - "fmt" - "io" - "net/http" - "net/url" - "os" - "path" - "strconv" - "strings" - "time" - - "golang.org/x/exp/slices" - - "github.com/hairyhenderson/gomplate/v4/internal/iohelpers" - "github.com/hairyhenderson/gomplate/v4/internal/urlhelpers" - "github.com/hairyhenderson/yaml" -) - -// Parse a config file -func Parse(in io.Reader) (*Config, error) { - out := &Config{} - dec := yaml.NewDecoder(in) - err := dec.Decode(out) - if err != nil && err != io.EOF { - return out, fmt.Errorf("YAML decoding failed, syntax may be invalid: %w", err) - } - return out, nil -} - -// Config - configures the gomplate execution -type Config struct { - Stdin io.Reader `yaml:"-"` - Stdout io.Writer `yaml:"-"` - Stderr io.Writer `yaml:"-"` - - DataSources map[string]DataSource `yaml:"datasources,omitempty"` - Context map[string]DataSource `yaml:"context,omitempty"` - Plugins map[string]PluginConfig `yaml:"plugins,omitempty"` - Templates Templates `yaml:"templates,omitempty"` - - // Extra HTTP headers not attached to pre-defined datsources. Potentially - // used by datasources defined in the template. 
- ExtraHeaders map[string]http.Header `yaml:"-"` - - // internal use only, can't be injected in YAML - PostExecInput io.Reader `yaml:"-"` - - Input string `yaml:"in,omitempty"` - InputDir string `yaml:"inputDir,omitempty"` - InputFiles []string `yaml:"inputFiles,omitempty,flow"` - ExcludeGlob []string `yaml:"excludes,omitempty"` - ExcludeProcessingGlob []string `yaml:"excludeProcessing,omitempty"` - - OutputDir string `yaml:"outputDir,omitempty"` - OutputMap string `yaml:"outputMap,omitempty"` - OutputFiles []string `yaml:"outputFiles,omitempty,flow"` - OutMode string `yaml:"chmod,omitempty"` - - LDelim string `yaml:"leftDelim,omitempty"` - RDelim string `yaml:"rightDelim,omitempty"` - - MissingKey string `yaml:"missingKey,omitempty"` - - PostExec []string `yaml:"postExec,omitempty,flow"` - - PluginTimeout time.Duration `yaml:"pluginTimeout,omitempty"` - - ExecPipe bool `yaml:"execPipe,omitempty"` - Experimental bool `yaml:"experimental,omitempty"` -} - -type experimentalCtxKey struct{} - -func SetExperimental(ctx context.Context) context.Context { - return context.WithValue(ctx, experimentalCtxKey{}, true) -} - -func ExperimentalEnabled(ctx context.Context) bool { - v, ok := ctx.Value(experimentalCtxKey{}).(bool) - return ok && v -} - -// mergeDataSources - use d as defaults, and override with values from o -func mergeDataSources(d, o map[string]DataSource) map[string]DataSource { - for k, v := range o { - c, ok := d[k] - if ok { - d[k] = c.mergeFrom(v) - } else { - d[k] = v - } - } - return d -} - -// DataSource - datasource configuration -type DataSource struct { - URL *url.URL `yaml:"-"` - Header http.Header `yaml:"header,omitempty,flow"` -} - -// UnmarshalYAML - satisfy the yaml.Umarshaler interface - URLs aren't -// well supported, and anyway we need to do some extra parsing -func (d *DataSource) UnmarshalYAML(value *yaml.Node) error { - type raw struct { - Header http.Header - URL string - } - r := raw{} - err := value.Decode(&r) - if err != nil { - return err - } - u, err := urlhelpers.ParseSourceURL(r.URL) - if err != nil { - return fmt.Errorf("could not parse datasource URL %q: %w", r.URL, err) - } - *d = DataSource{ - URL: u, - Header: r.Header, - } - return nil -} - -// MarshalYAML - satisfy the yaml.Marshaler interface - URLs aren't -// well supported, and anyway we need to do some extra parsing -func (d DataSource) MarshalYAML() (interface{}, error) { - type raw struct { - Header http.Header - URL string - } - r := raw{ - URL: d.URL.String(), - Header: d.Header, - } - return r, nil -} - -// mergeFrom - use this as default, and override with values from o -func (d DataSource) mergeFrom(o DataSource) DataSource { - if o.URL != nil { - d.URL = o.URL - } - if d.Header == nil { - d.Header = o.Header - } else { - for k, v := range o.Header { - d.Header[k] = v - } - } - return d -} - -type PluginConfig struct { - Cmd string - Args []string `yaml:"args,omitempty"` - Timeout time.Duration `yaml:"timeout,omitempty"` - Pipe bool `yaml:"pipe,omitempty"` -} - -// UnmarshalYAML - satisfy the yaml.Umarshaler interface - plugin configs can -// either be a plain string (to specify only the name), or a map with a name, -// timeout, and pipe flag. 
-func (p *PluginConfig) UnmarshalYAML(value *yaml.Node) error { - if value.Kind == yaml.ScalarNode { - s := "" - err := value.Decode(&s) - if err != nil { - return err - } - - *p = PluginConfig{Cmd: s} - return nil - } - - if value.Kind != yaml.MappingNode { - return fmt.Errorf("plugin config must be a string or map") - } - - type raw struct { - Cmd string - Args []string - Timeout time.Duration - Pipe bool - } - r := raw{} - err := value.Decode(&r) - if err != nil { - return err - } - - *p = PluginConfig(r) - - return nil -} - -// MergeFrom - use this Config as the defaults, and override it with any -// non-zero values from the other Config -// -// Note that Input/InputDir/InputFiles will override each other, as well as -// OutputDir/OutputFiles. -func (c *Config) MergeFrom(o *Config) *Config { - switch { - case !isZero(o.Input): - c.Input = o.Input - c.InputDir = "" - c.InputFiles = nil - c.OutputDir = "" - case !isZero(o.InputDir): - c.Input = "" - c.InputDir = o.InputDir - c.InputFiles = nil - case !isZero(o.InputFiles): - if !(len(o.InputFiles) == 1 && o.InputFiles[0] == "-") { - c.Input = "" - c.InputFiles = o.InputFiles - c.InputDir = "" - c.OutputDir = "" - } - } - - if !isZero(o.OutputMap) { - c.OutputDir = "" - c.OutputFiles = nil - c.OutputMap = o.OutputMap - } - if !isZero(o.OutputDir) { - c.OutputDir = o.OutputDir - c.OutputFiles = nil - c.OutputMap = "" - } - if !isZero(o.OutputFiles) { - c.OutputDir = "" - c.OutputFiles = o.OutputFiles - c.OutputMap = "" - } - if !isZero(o.ExecPipe) { - c.ExecPipe = o.ExecPipe - c.PostExec = o.PostExec - c.OutputFiles = o.OutputFiles - } - if !isZero(o.ExcludeGlob) { - c.ExcludeGlob = o.ExcludeGlob - } - if !isZero(o.ExcludeProcessingGlob) { - c.ExcludeProcessingGlob = o.ExcludeProcessingGlob - } - if !isZero(o.OutMode) { - c.OutMode = o.OutMode - } - if !isZero(o.LDelim) { - c.LDelim = o.LDelim - } - if !isZero(o.RDelim) { - c.RDelim = o.RDelim - } - if c.Templates == nil { - c.Templates = o.Templates - } else { - c.Templates = mergeDataSources(c.Templates, o.Templates) - } - if c.DataSources == nil { - c.DataSources = o.DataSources - } else { - c.DataSources = mergeDataSources(c.DataSources, o.DataSources) - } - if c.Context == nil { - c.Context = o.Context - } else { - c.Context = mergeDataSources(c.Context, o.Context) - } - if len(o.Plugins) > 0 { - for k, v := range o.Plugins { - c.Plugins[k] = v - } - } - - return c -} - -// ParseDataSourceFlags - sets DataSources, Context, and Templates fields from -// the key=value format flags as provided at the command-line -// Unreferenced headers will be set in c.ExtraHeaders -func (c *Config) ParseDataSourceFlags(datasources, contexts, templates, headers []string) error { - err := c.parseResources(datasources, contexts, templates) - if err != nil { - return err - } - - hdrs, err := parseHeaderArgs(headers) - if err != nil { - return err - } - - for k, v := range hdrs { - if d, ok := c.Context[k]; ok { - d.Header = v - c.Context[k] = d - delete(hdrs, k) - } - if d, ok := c.DataSources[k]; ok { - d.Header = v - c.DataSources[k] = d - delete(hdrs, k) - } - if t, ok := c.Templates[k]; ok { - t.Header = v - c.Templates[k] = t - delete(hdrs, k) - } - } - if len(hdrs) > 0 { - c.ExtraHeaders = hdrs - } - return nil -} - -func (c *Config) parseResources(datasources, contexts, templates []string) error { - for _, d := range datasources { - k, ds, err := parseDatasourceArg(d) - if err != nil { - return err - } - if c.DataSources == nil { - c.DataSources = map[string]DataSource{} - } - c.DataSources[k] = ds 
- } - for _, d := range contexts { - k, ds, err := parseDatasourceArg(d) - if err != nil { - return err - } - if c.Context == nil { - c.Context = map[string]DataSource{} - } - c.Context[k] = ds - } - for _, t := range templates { - k, ds, err := parseTemplateArg(t) - if err != nil { - return err - } - if c.Templates == nil { - c.Templates = map[string]DataSource{} - } - c.Templates[k] = ds - } - - return nil -} - -// ParsePluginFlags - sets the Plugins field from the -// key=value format flags as provided at the command-line -func (c *Config) ParsePluginFlags(plugins []string) error { - for _, plugin := range plugins { - parts := strings.SplitN(plugin, "=", 2) - if len(parts) < 2 { - return fmt.Errorf("plugin requires both name and path") - } - if c.Plugins == nil { - c.Plugins = map[string]PluginConfig{} - } - c.Plugins[parts[0]] = PluginConfig{Cmd: parts[1]} - } - return nil -} - -func parseDatasourceArg(value string) (alias string, ds DataSource, err error) { - alias, u, _ := strings.Cut(value, "=") - if u == "" { - u = alias - alias, _, _ = strings.Cut(value, ".") - if path.Base(u) != u { - err = fmt.Errorf("invalid argument (%s): must provide an alias with files not in working directory", value) - return alias, ds, err - } - } - - ds.URL, err = urlhelpers.ParseSourceURL(u) - - return alias, ds, err -} - -func parseHeaderArgs(headerArgs []string) (map[string]http.Header, error) { - headers := make(map[string]http.Header) - for _, v := range headerArgs { - ds, name, value, err := splitHeaderArg(v) - if err != nil { - return nil, err - } - if _, ok := headers[ds]; !ok { - headers[ds] = make(http.Header) - } - headers[ds][name] = append(headers[ds][name], strings.TrimSpace(value)) - } - return headers, nil -} - -func splitHeaderArg(arg string) (datasourceAlias, name, value string, err error) { - parts := strings.SplitN(arg, "=", 2) - if len(parts) != 2 { - err = fmt.Errorf("invalid datasource-header option '%s'", arg) - return "", "", "", err - } - datasourceAlias = parts[0] - name, value, err = splitHeader(parts[1]) - return datasourceAlias, name, value, err -} - -func splitHeader(header string) (name, value string, err error) { - parts := strings.SplitN(header, ":", 2) - if len(parts) != 2 { - err = fmt.Errorf("invalid HTTP Header format '%s'", header) - return "", "", err - } - name = http.CanonicalHeaderKey(parts[0]) - value = parts[1] - return name, value, nil -} - -// Validate the Config -func (c Config) Validate() (err error) { - err = notTogether( - []string{"in", "inputFiles", "inputDir"}, - c.Input, c.InputFiles, c.InputDir) - if err == nil { - err = notTogether( - []string{"outputFiles", "outputDir", "outputMap"}, - c.OutputFiles, c.OutputDir, c.OutputMap) - } - if err == nil { - err = notTogether( - []string{"outputDir", "outputMap", "execPipe"}, - c.OutputDir, c.OutputMap, c.ExecPipe) - } - - if err == nil { - err = mustTogether("outputDir", "inputDir", - c.OutputDir, c.InputDir) - } - - if err == nil { - err = mustTogether("outputMap", "inputDir", - c.OutputMap, c.InputDir) - } - - if err == nil { - f := len(c.InputFiles) - if f == 0 && c.Input != "" { - f = 1 - } - o := len(c.OutputFiles) - if f != o && !c.ExecPipe { - err = fmt.Errorf("must provide same number of 'outputFiles' (%d) as 'in' or 'inputFiles' (%d) options", o, f) - } - } - - if err == nil { - if c.ExecPipe && len(c.PostExec) == 0 { - err = fmt.Errorf("execPipe may only be used with a postExec command") - } - } - - if err == nil { - if c.ExecPipe && (len(c.OutputFiles) > 0 && c.OutputFiles[0] != "-") { - err = 
fmt.Errorf("must not set 'outputFiles' when using 'execPipe'") - } - } - - if err == nil { - missingKeyValues := []string{"", "error", "zero", "default", "invalid"} - if !slices.Contains(missingKeyValues, c.MissingKey) { - err = fmt.Errorf("not allowed value for the 'missing-key' flag: %s. Allowed values: %s", c.MissingKey, strings.Join(missingKeyValues, ",")) - } - } - - return err -} - -func notTogether(names []string, values ...interface{}) error { - found := "" - for i, value := range values { - if isZero(value) { - continue - } - if found != "" { - return fmt.Errorf("only one of these options is supported at a time: '%s', '%s'", - found, names[i]) - } - found = names[i] - } - return nil -} - -func mustTogether(left, right string, lValue, rValue interface{}) error { - if !isZero(lValue) && isZero(rValue) { - return fmt.Errorf("these options must be set together: '%s', '%s'", - left, right) - } - - return nil -} - -func isZero(value interface{}) bool { - switch v := value.(type) { - case string: - return v == "" - case []string: - return len(v) == 0 - case bool: - return !v - default: - return false - } -} - -// ApplyDefaults - any defaults changed here should be added to cmd.InitFlags as -// well for proper help/usage display. -func (c *Config) ApplyDefaults() { - if c.Stdout == nil { - c.Stdout = os.Stdout - } - if c.Stderr == nil { - c.Stderr = os.Stderr - } - if c.Stdin == nil { - c.Stdin = os.Stdin - } - - if c.InputDir != "" && c.OutputDir == "" && c.OutputMap == "" { - c.OutputDir = "." - } - if c.Input == "" && c.InputDir == "" && len(c.InputFiles) == 0 { - c.InputFiles = []string{"-"} - } - if c.OutputDir == "" && c.OutputMap == "" && len(c.OutputFiles) == 0 && !c.ExecPipe { - c.OutputFiles = []string{"-"} - } - if c.LDelim == "" { - c.LDelim = "{{" - } - if c.RDelim == "" { - c.RDelim = "}}" - } - if c.MissingKey == "" { - c.MissingKey = "error" - } - - if c.ExecPipe { - pipe := &bytes.Buffer{} - c.PostExecInput = pipe - c.OutputFiles = []string{"-"} - - // --exec-pipe redirects standard out to the out pipe - c.Stdout = pipe - } else { - c.PostExecInput = c.Stdin - } - - if c.PluginTimeout == 0 { - c.PluginTimeout = 5 * time.Second - } -} - -// GetMode - parse an os.FileMode out of the string, and let us know if it's an override or not... -func (c *Config) GetMode() (os.FileMode, bool, error) { - modeOverride := c.OutMode != "" - m, err := strconv.ParseUint("0"+c.OutMode, 8, 32) - if err != nil { - return 0, false, err - } - mode := iohelpers.NormalizeFileMode(os.FileMode(m)) - if mode == 0 && c.Input != "" { - mode = iohelpers.NormalizeFileMode(0o644) - } - return mode, modeOverride, nil -} - -// String - -func (c *Config) String() string { - out := &strings.Builder{} - out.WriteString("---\n") - enc := yaml.NewEncoder(out) - enc.SetIndent(2) - - // dereferenced copy so we can truncate input for display - c2 := *c - if len(c2.Input) >= 11 { - c2.Input = c2.Input[0:8] + "..." 
- } - - err := enc.Encode(c2) - if err != nil { - return err.Error() - } - return out.String() -} diff --git a/internal/config/configfile_test.go b/internal/config/configfile_test.go deleted file mode 100644 index 3cd1b109..00000000 --- a/internal/config/configfile_test.go +++ /dev/null @@ -1,873 +0,0 @@ -package config - -import ( - "net/http" - "net/url" - "runtime" - "strings" - "testing" - "time" - - "github.com/hairyhenderson/gomplate/v4/internal/iohelpers" - "github.com/hairyhenderson/yaml" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestParseConfigFile(t *testing.T) { - t.Parallel() - in := "in: hello world\n" - expected := &Config{ - Input: "hello world", - } - cf, err := Parse(strings.NewReader(in)) - require.NoError(t, err) - assert.Equal(t, expected, cf) - - in = `in: hello world -outputFiles: [out.txt] -chmod: 644 - -datasources: - data: - url: file:///data.json - moredata: - url: https://example.com/more.json - header: - Authorization: ["Bearer abcd1234"] - -context: - .: - url: file:///data.json - -plugins: - foo: - cmd: echo - pipe: true - -templates: - foo: - url: file:///tmp/foo.t - -pluginTimeout: 2s -` - expected = &Config{ - Input: "hello world", - OutputFiles: []string{"out.txt"}, - DataSources: map[string]DataSource{ - "data": { - URL: mustURL("file:///data.json"), - }, - "moredata": { - URL: mustURL("https://example.com/more.json"), - Header: map[string][]string{ - "Authorization": {"Bearer abcd1234"}, - }, - }, - }, - Context: map[string]DataSource{ - ".": { - URL: mustURL("file:///data.json"), - }, - }, - OutMode: "644", - Plugins: map[string]PluginConfig{ - "foo": {Cmd: "echo", Pipe: true}, - }, - Templates: Templates{"foo": DataSource{URL: mustURL("file:///tmp/foo.t")}}, - PluginTimeout: 2 * time.Second, - } - - cf, err = Parse(strings.NewReader(in)) - require.NoError(t, err) - assert.EqualValues(t, expected, cf) -} - -func mustURL(s string) *url.URL { - u, err := url.Parse(s) - if err != nil { - panic(err) - } - - return u -} - -func TestValidate(t *testing.T) { - t.Parallel() - require.NoError(t, validateConfig("")) - - require.Error(t, validateConfig(`in: foo -inputFiles: [bar] -`)) - require.Error(t, validateConfig(`inputDir: foo -inputFiles: [bar] -`)) - require.Error(t, validateConfig(`inputDir: foo -in: bar -`)) - - require.Error(t, validateConfig(`outputDir: foo -outputFiles: [bar] -`)) - - require.Error(t, validateConfig(`in: foo -outputFiles: [bar, baz] -`)) - - require.Error(t, validateConfig(`inputFiles: [foo] -outputFiles: [bar, baz] -`)) - - require.Error(t, validateConfig(`outputDir: foo -outputFiles: [bar] -`)) - - require.Error(t, validateConfig(`outputDir: foo -`)) - - require.Error(t, validateConfig(`outputMap: foo -`)) - - require.Error(t, validateConfig(`outputMap: foo -outputFiles: [bar] -`)) - - require.Error(t, validateConfig(`inputDir: foo -outputDir: bar -outputMap: bar -`)) - - require.Error(t, validateConfig(`execPipe: true -`)) - require.Error(t, validateConfig(`execPipe: true -postExec: "" -`)) - - require.NoError(t, validateConfig(`execPipe: true -postExec: [echo, foo] -`)) - - require.Error(t, validateConfig(`execPipe: true -outputFiles: [foo] -postExec: [echo] -`)) - - require.NoError(t, validateConfig(`execPipe: true -inputFiles: ['-'] -postExec: [echo] -`)) - - require.Error(t, validateConfig(`inputDir: foo -execPipe: true -outputDir: foo -postExec: [echo] -`)) - - require.Error(t, validateConfig(`inputDir: foo -execPipe: true -outputMap: foo -postExec: [echo] -`)) -} - -func 
validateConfig(c string) error { - in := strings.NewReader(c) - cfg, err := Parse(in) - if err != nil { - return err - } - err = cfg.Validate() - return err -} - -func TestMergeFrom(t *testing.T) { - t.Parallel() - cfg := &Config{ - Input: "hello world", - DataSources: map[string]DataSource{ - "data": { - URL: mustURL("file:///data.json"), - }, - "moredata": { - URL: mustURL("https://example.com/more.json"), - Header: http.Header{ - "Authorization": {"Bearer abcd1234"}, - }, - }, - }, - Context: map[string]DataSource{ - "foo": { - URL: mustURL("https://example.com/foo.yaml"), - Header: http.Header{ - "Accept": {"application/yaml"}, - }, - }, - }, - OutMode: "644", - } - other := &Config{ - OutputFiles: []string{"out.txt"}, - DataSources: map[string]DataSource{ - "data": { - Header: http.Header{ - "Accept": {"foo/bar"}, - }, - }, - }, - Context: map[string]DataSource{ - "foo": { - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - "bar": {URL: mustURL("stdin:///")}, - }, - } - - expected := &Config{ - Input: "hello world", - OutputFiles: []string{"out.txt"}, - DataSources: map[string]DataSource{ - "data": { - URL: mustURL("file:///data.json"), - Header: http.Header{ - "Accept": {"foo/bar"}, - }, - }, - "moredata": { - URL: mustURL("https://example.com/more.json"), - Header: http.Header{ - "Authorization": {"Bearer abcd1234"}, - }, - }, - }, - Context: map[string]DataSource{ - "foo": { - URL: mustURL("https://example.com/foo.yaml"), - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - "bar": {URL: mustURL("stdin:///")}, - }, - OutMode: "644", - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - cfg = &Config{ - Input: "hello world", - } - other = &Config{ - InputFiles: []string{"in.tmpl", "in2.tmpl"}, - OutputFiles: []string{"out", "out2"}, - } - expected = &Config{ - InputFiles: []string{"in.tmpl", "in2.tmpl"}, - OutputFiles: []string{"out", "out2"}, - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - cfg = &Config{ - Input: "hello world", - OutputFiles: []string{"out", "out2"}, - } - other = &Config{ - InputDir: "in/", - OutputDir: "out/", - } - expected = &Config{ - InputDir: "in/", - OutputDir: "out/", - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - cfg = &Config{ - Input: "hello world", - OutputFiles: []string{"out"}, - } - other = &Config{ - Input: "hi", - ExecPipe: true, - PostExec: []string{"cat"}, - } - expected = &Config{ - Input: "hi", - ExecPipe: true, - PostExec: []string{"cat"}, - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - cfg = &Config{ - Input: "hello world", - OutputFiles: []string{"-"}, - Plugins: map[string]PluginConfig{ - "sleep": {Cmd: "echo"}, - }, - PluginTimeout: 500 * time.Microsecond, - } - other = &Config{ - InputFiles: []string{"-"}, - OutputFiles: []string{"-"}, - Plugins: map[string]PluginConfig{ - "sleep": {Cmd: "sleep.sh"}, - }, - } - expected = &Config{ - Input: "hello world", - OutputFiles: []string{"-"}, - Plugins: map[string]PluginConfig{ - "sleep": {Cmd: "sleep.sh"}, - }, - PluginTimeout: 500 * time.Microsecond, - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - cfg = &Config{ - Input: "hello world", - OutMode: "644", - } - other = &Config{ - OutputFiles: []string{"out.txt"}, - Context: map[string]DataSource{ - "foo": { - URL: mustURL("https://example.com/foo.yaml"), - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - "bar": {URL: mustURL("stdin:///")}, - }, - DataSources: map[string]DataSource{ - "data": { - URL: 
mustURL("file:///data.json"), - }, - "moredata": { - URL: mustURL("https://example.com/more.json"), - Header: http.Header{ - "Authorization": {"Bearer abcd1234"}, - }, - }, - }, - } - expected = &Config{ - Input: "hello world", - OutputFiles: []string{"out.txt"}, - Context: map[string]DataSource{ - "foo": { - URL: mustURL("https://example.com/foo.yaml"), - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - "bar": {URL: mustURL("stdin:///")}, - }, - DataSources: map[string]DataSource{ - "data": { - URL: mustURL("file:///data.json"), - }, - "moredata": { - URL: mustURL("https://example.com/more.json"), - Header: http.Header{ - "Authorization": {"Bearer abcd1234"}, - }, - }, - }, - OutMode: "644", - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) - - // test template merging & a few other things - cfg = &Config{ - InputDir: "indir/", - ExcludeGlob: []string{"*.txt"}, - Templates: Templates{ - "foo": { - URL: mustURL("file:///foo.yaml"), - }, - "bar": { - URL: mustURL("stdin:///"), - Header: http.Header{"Accept": {"application/json"}}, - }, - }, - } - other = &Config{ - ExcludeGlob: []string{"*.yaml"}, - OutputMap: "${ .in }.out", - OutMode: "600", - LDelim: "${", - RDelim: "}", - Templates: Templates{ - "foo": {URL: mustURL("https://example.com/foo.yaml")}, - "baz": {URL: mustURL("vault:///baz")}, - }, - } - expected = &Config{ - InputDir: "indir/", - ExcludeGlob: []string{"*.yaml"}, - OutputMap: "${ .in }.out", - OutMode: "600", - LDelim: "${", - RDelim: "}", - Templates: Templates{ - "foo": {URL: mustURL("https://example.com/foo.yaml")}, - "bar": { - URL: mustURL("stdin:///"), - Header: http.Header{"Accept": {"application/json"}}, - }, - "baz": {URL: mustURL("vault:///baz")}, - }, - } - - assert.EqualValues(t, expected, cfg.MergeFrom(other)) -} - -func TestParseDataSourceFlags(t *testing.T) { - t.Parallel() - cfg := &Config{} - err := cfg.ParseDataSourceFlags(nil, nil, nil, nil) - require.NoError(t, err) - assert.EqualValues(t, &Config{}, cfg) - - cfg = &Config{} - err = cfg.ParseDataSourceFlags([]string{"foo/bar/baz.json"}, nil, nil, nil) - require.Error(t, err) - - cfg = &Config{} - err = cfg.ParseDataSourceFlags([]string{"baz=foo/bar/baz.json"}, nil, nil, nil) - require.NoError(t, err) - expected := &Config{ - DataSources: map[string]DataSource{ - "baz": {URL: mustURL("foo/bar/baz.json")}, - }, - } - assert.EqualValues(t, expected, cfg, "expected: %+v\nactual: %+v\n", expected, cfg) - - cfg = &Config{} - err = cfg.ParseDataSourceFlags( - []string{"baz=foo/bar/baz.json"}, - nil, - nil, - []string{"baz=Accept: application/json"}) - require.NoError(t, err) - assert.EqualValues(t, &Config{ - DataSources: map[string]DataSource{ - "baz": { - URL: mustURL("foo/bar/baz.json"), - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - }, - }, cfg) - - cfg = &Config{} - err = cfg.ParseDataSourceFlags( - []string{"baz=foo/bar/baz.json"}, - []string{"foo=http://example.com"}, - nil, - []string{ - "foo=Accept: application/json", - "bar=Authorization: Basic xxxxx", - }, - ) - require.NoError(t, err) - assert.EqualValues(t, &Config{ - DataSources: map[string]DataSource{ - "baz": {URL: mustURL("foo/bar/baz.json")}, - }, - Context: map[string]DataSource{ - "foo": { - URL: mustURL("http://example.com"), - Header: http.Header{ - "Accept": {"application/json"}, - }, - }, - }, - ExtraHeaders: map[string]http.Header{ - "bar": {"Authorization": {"Basic xxxxx"}}, - }, - }, cfg) - - cfg = &Config{} - err = cfg.ParseDataSourceFlags( - nil, - nil, - 
[]string{"foo=http://example.com", "file.tmpl", "tmpldir/"}, - []string{"foo=Accept: application/json", "bar=Authorization: Basic xxxxx"}, - ) - require.NoError(t, err) - assert.EqualValues(t, &Config{ - Templates: Templates{ - "foo": { - URL: mustURL("http://example.com"), - Header: http.Header{"Accept": {"application/json"}}, - }, - "file.tmpl": {URL: mustURL("file.tmpl")}, - "tmpldir/": {URL: mustURL("tmpldir/")}, - }, - ExtraHeaders: map[string]http.Header{ - "bar": {"Authorization": {"Basic xxxxx"}}, - }, - }, cfg) -} - -func TestParsePluginFlags(t *testing.T) { - t.Parallel() - cfg := &Config{} - err := cfg.ParsePluginFlags(nil) - require.NoError(t, err) - - cfg = &Config{} - err = cfg.ParsePluginFlags([]string{"foo=bar"}) - require.NoError(t, err) - assert.EqualValues(t, &Config{Plugins: map[string]PluginConfig{"foo": {Cmd: "bar"}}}, cfg) -} - -func TestConfig_String(t *testing.T) { - t.Run("defaults", func(t *testing.T) { - c := &Config{} - c.ApplyDefaults() - - expected := `--- -inputFiles: ['-'] -outputFiles: ['-'] -leftDelim: '{{' -rightDelim: '}}' -missingKey: error -pluginTimeout: 5s -` - assert.Equal(t, expected, c.String()) - }) - - t.Run("overridden values", func(t *testing.T) { - c := &Config{ - LDelim: "L", - RDelim: "R", - Input: "foo", - OutputFiles: []string{"-"}, - Templates: Templates{ - "foo": {URL: mustURL("https://www.example.com/foo.tmpl")}, - "bar": {URL: mustURL("file:///tmp/bar.t")}, - }, - } - expected := `--- -in: foo -outputFiles: ['-'] -leftDelim: L -rightDelim: R -templates: - foo: - url: https://www.example.com/foo.tmpl - bar: - url: file:///tmp/bar.t -` - assert.YAMLEq(t, expected, c.String()) - }) - - t.Run("long input", func(t *testing.T) { - c := &Config{ - LDelim: "L", - RDelim: "R", - Input: "long input that should be truncated", - OutputFiles: []string{"-"}, - Templates: Templates{ - "foo": {URL: mustURL("https://www.example.com/foo.tmpl")}, - "bar": {URL: mustURL("file:///tmp/bar.t")}, - }, - } - expected := `--- -in: long inp... 
-outputFiles: ['-'] -leftDelim: L -rightDelim: R -templates: - foo: - url: https://www.example.com/foo.tmpl - bar: - url: file:///tmp/bar.t -` - assert.YAMLEq(t, expected, c.String()) - }) - - t.Run("relative dirs", func(t *testing.T) { - c := &Config{ - InputDir: "in/", - OutputDir: "out/", - } - expected := `--- -inputDir: in/ -outputDir: out/ -` - assert.YAMLEq(t, expected, c.String()) - }) - - t.Run("outputmap", func(t *testing.T) { - c := &Config{ - InputDir: "in/", - OutputMap: "{{ .in }}", - } - expected := `--- -inputDir: in/ -outputMap: '{{ .in }}' -` - - assert.YAMLEq(t, expected, c.String()) - }) - - t.Run("pluginTimeout", func(t *testing.T) { - c := &Config{ - PluginTimeout: 500 * time.Millisecond, - } - expected := `--- -pluginTimeout: 500ms -` - - assert.YAMLEq(t, expected, c.String()) - }) - - t.Run("plugins", func(t *testing.T) { - c := &Config{ - Plugins: map[string]PluginConfig{ - "foo": { - Cmd: "bar", - Timeout: 1 * time.Second, - Pipe: true, - }, - }, - } - expected := `--- -plugins: - foo: - cmd: bar - timeout: 1s - pipe: true -` - - assert.YAMLEq(t, expected, c.String()) - }) -} - -func TestApplyDefaults(t *testing.T) { - t.Parallel() - cfg := &Config{} - - cfg.ApplyDefaults() - assert.EqualValues(t, []string{"-"}, cfg.InputFiles) - assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) - assert.Empty(t, cfg.OutputDir) - assert.Equal(t, "{{", cfg.LDelim) - assert.Equal(t, "}}", cfg.RDelim) - - cfg = &Config{ - InputDir: "in", - } - - cfg.ApplyDefaults() - assert.Empty(t, cfg.InputFiles) - assert.Empty(t, cfg.OutputFiles) - assert.Equal(t, ".", cfg.OutputDir) - assert.Equal(t, "{{", cfg.LDelim) - assert.Equal(t, "}}", cfg.RDelim) - - cfg = &Config{ - Input: "foo", - LDelim: "<", - RDelim: ">", - } - - cfg.ApplyDefaults() - assert.Empty(t, cfg.InputFiles) - assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) - assert.Empty(t, cfg.OutputDir) - assert.Equal(t, "<", cfg.LDelim) - assert.Equal(t, ">", cfg.RDelim) - - cfg = &Config{ - Input: "foo", - ExecPipe: true, - } - - cfg.ApplyDefaults() - assert.Empty(t, cfg.InputFiles) - assert.EqualValues(t, []string{"-"}, cfg.OutputFiles) - assert.Empty(t, cfg.OutputDir) - assert.True(t, cfg.ExecPipe) - - cfg = &Config{ - InputDir: "foo", - OutputMap: "bar", - } - - cfg.ApplyDefaults() - assert.Empty(t, cfg.InputFiles) - assert.Empty(t, cfg.Input) - assert.Empty(t, cfg.OutputFiles) - assert.Empty(t, cfg.OutputDir) - assert.False(t, cfg.ExecPipe) - assert.Equal(t, "bar", cfg.OutputMap) -} - -func TestGetMode(t *testing.T) { - c := &Config{} - m, o, err := c.GetMode() - require.NoError(t, err) - assert.Equal(t, iohelpers.NormalizeFileMode(0), m) - assert.False(t, o) - - c = &Config{OutMode: "755"} - m, o, err = c.GetMode() - require.NoError(t, err) - assert.Equal(t, iohelpers.NormalizeFileMode(0o755), m) - assert.True(t, o) - - c = &Config{OutMode: "0755"} - m, o, err = c.GetMode() - require.NoError(t, err) - assert.Equal(t, iohelpers.NormalizeFileMode(0o755), m) - assert.True(t, o) - - c = &Config{OutMode: "foo"} - _, _, err = c.GetMode() - require.Error(t, err) -} - -func TestParseHeaderArgs(t *testing.T) { - args := []string{ - "foo=Accept: application/json", - "bar=Authorization: Bearer supersecret", - } - expected := map[string]http.Header{ - "foo": { - "Accept": {"application/json"}, - }, - "bar": { - "Authorization": {"Bearer supersecret"}, - }, - } - parsed, err := parseHeaderArgs(args) - require.NoError(t, err) - assert.Equal(t, expected, parsed) - - _, err = parseHeaderArgs([]string{"foo"}) - require.Error(t, err) - - _, 
err = parseHeaderArgs([]string{"foo=bar"}) - require.Error(t, err) - - args = []string{ - "foo=Accept: application/json", - "foo=Foo: bar", - "foo=foo: baz", - "foo=fOO: qux", - "bar=Authorization: Bearer supersecret", - } - expected = map[string]http.Header{ - "foo": { - "Accept": {"application/json"}, - "Foo": {"bar", "baz", "qux"}, - }, - "bar": { - "Authorization": {"Bearer supersecret"}, - }, - } - parsed, err = parseHeaderArgs(args) - require.NoError(t, err) - assert.Equal(t, expected, parsed) -} - -func TestParseDatasourceArgNoAlias(t *testing.T) { - alias, ds, err := parseDatasourceArg("foo.json") - require.NoError(t, err) - assert.Equal(t, "foo", alias) - assert.Empty(t, ds.URL.Scheme) - - _, _, err = parseDatasourceArg("../foo.json") - require.Error(t, err) - - _, _, err = parseDatasourceArg("ftp://example.com/foo.yml") - require.Error(t, err) -} - -func TestParseDatasourceArgWithAlias(t *testing.T) { - alias, ds, err := parseDatasourceArg("data=foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Path: "foo.json"}, ds.URL) - - alias, ds, err = parseDatasourceArg("data=/otherdir/foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.Equal(t, "file", ds.URL.Scheme) - assert.True(t, ds.URL.IsAbs()) - assert.Equal(t, "/otherdir/foo.json", ds.URL.Path) - - if runtime.GOOS == "windows" { - alias, ds, err = parseDatasourceArg("data=foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Path: "foo.json"}, ds.URL) - - alias, ds, err = parseDatasourceArg(`data=\otherdir\foo.json`) - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Scheme: "file", Path: "/otherdir/foo.json"}, ds.URL) - - alias, ds, err = parseDatasourceArg("data=C:\\windowsdir\\foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Scheme: "file", Path: "C:/windowsdir/foo.json"}, ds.URL) - - alias, ds, err = parseDatasourceArg("data=\\\\somehost\\share\\foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Scheme: "file", Host: "somehost", Path: "/share/foo.json"}, ds.URL) - } - - alias, ds, err = parseDatasourceArg("data=sftp://example.com/blahblah/foo.json") - require.NoError(t, err) - assert.Equal(t, "data", alias) - assert.EqualValues(t, &url.URL{Scheme: "sftp", Host: "example.com", Path: "/blahblah/foo.json"}, ds.URL) - - alias, ds, err = parseDatasourceArg("merged=merge:./foo.yaml|http://example.com/bar.json%3Ffoo=bar") - require.NoError(t, err) - assert.Equal(t, "merged", alias) - assert.EqualValues(t, &url.URL{Scheme: "merge", Opaque: "./foo.yaml|http://example.com/bar.json%3Ffoo=bar"}, ds.URL) -} - -func TestPluginConfig_UnmarshalYAML(t *testing.T) { - in := `foo` - out := PluginConfig{} - err := yaml.Unmarshal([]byte(in), &out) - require.NoError(t, err) - assert.EqualValues(t, PluginConfig{Cmd: "foo"}, out) - - in = `[foo, bar]` - out = PluginConfig{} - err = yaml.Unmarshal([]byte(in), &out) - require.Error(t, err) - - in = `cmd: foo` - out = PluginConfig{} - err = yaml.Unmarshal([]byte(in), &out) - require.NoError(t, err) - assert.EqualValues(t, PluginConfig{Cmd: "foo"}, out) - - in = `cmd: foo -timeout: 10ms -pipe: true -` - out = PluginConfig{} - err = yaml.Unmarshal([]byte(in), &out) - require.NoError(t, err) - assert.EqualValues(t, PluginConfig{ - Cmd: "foo", - Timeout: time.Duration(10) * time.Millisecond, - Pipe: true, - }, out) -} diff --git 
a/internal/config/types.go b/internal/config/types.go index 6c38dc6a..022363f7 100644 --- a/internal/config/types.go +++ b/internal/config/types.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "net/http" + "net/url" "strings" "github.com/hairyhenderson/gomplate/v4/internal/deprecated" @@ -88,13 +89,58 @@ func (t Templates) MarshalYAML() (interface{}, error) { return m, nil } -func parseTemplateArg(value string) (alias string, ds DataSource, err error) { - alias, u, _ := strings.Cut(value, "=") - if u == "" { - u = alias - } +type experimentalCtxKey struct{} + +func SetExperimental(ctx context.Context) context.Context { + return context.WithValue(ctx, experimentalCtxKey{}, true) +} + +func ExperimentalEnabled(ctx context.Context) bool { + v, ok := ctx.Value(experimentalCtxKey{}).(bool) + return ok && v +} - ds.URL, err = urlhelpers.ParseSourceURL(u) +// DataSource - datasource configuration +// +// defined in this package to avoid cyclic dependencies +type DataSource struct { + URL *url.URL `yaml:"-"` + Header http.Header `yaml:"header,omitempty,flow"` +} + +// UnmarshalYAML - satisfy the yaml.Umarshaler interface - URLs aren't +// well supported, and anyway we need to do some extra parsing +func (d *DataSource) UnmarshalYAML(value *yaml.Node) error { + type raw struct { + Header http.Header + URL string + } + r := raw{} + err := value.Decode(&r) + if err != nil { + return err + } + u, err := urlhelpers.ParseSourceURL(r.URL) + if err != nil { + return fmt.Errorf("could not parse datasource URL %q: %w", r.URL, err) + } + *d = DataSource{ + URL: u, + Header: r.Header, + } + return nil +} - return alias, ds, err +// MarshalYAML - satisfy the yaml.Marshaler interface - URLs aren't +// well supported, and anyway we need to do some extra parsing +func (d DataSource) MarshalYAML() (interface{}, error) { + type raw struct { + Header http.Header + URL string + } + r := raw{ + URL: d.URL.String(), + Header: d.Header, + } + return r, nil } diff --git a/internal/config/types_test.go b/internal/config/types_test.go deleted file mode 100644 index 1ee6c4fe..00000000 --- a/internal/config/types_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package config - -import ( - "net/http" - "testing" - - "github.com/hairyhenderson/yaml" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestTemplates_UnmarshalYAML(t *testing.T) { - in := `t: - url: foo/bar/helloworld.tmpl -templatedir: - url: templatedir/ -dir: - url: foo/bar/ -mytemplate.t: - url: mytemplate.t -remote: - url: https://example.com/foo/bar/helloworld.tmpl - header: - Accept: [text/plain, text/template]` - out := Templates{} - err := yaml.Unmarshal([]byte(in), &out) - require.NoError(t, err) - assert.EqualValues(t, Templates{ - "t": {URL: mustURL("foo/bar/helloworld.tmpl")}, - "templatedir": {URL: mustURL("templatedir/")}, - "dir": {URL: mustURL("foo/bar/")}, - "mytemplate.t": {URL: mustURL("mytemplate.t")}, - "remote": { - URL: mustURL("https://example.com/foo/bar/helloworld.tmpl"), - Header: http.Header{"Accept": {"text/plain", "text/template"}}, - }, - }, out) - - // legacy array format - in = `- t=foo/bar/helloworld.tmpl -- templatedir/ -- dir=foo/bar/ -- mytemplate.t -- remote=https://example.com/foo/bar/helloworld.tmpl` - out = Templates{} - err = yaml.Unmarshal([]byte(in), &out) - require.NoError(t, err) - assert.EqualValues(t, Templates{ - "t": {URL: mustURL("foo/bar/helloworld.tmpl")}, - "templatedir/": {URL: mustURL("templatedir/")}, - "dir": {URL: mustURL("foo/bar/")}, - "mytemplate.t": {URL: 
mustURL("mytemplate.t")}, - "remote": {URL: mustURL("https://example.com/foo/bar/helloworld.tmpl")}, - }, out) - - // invalid format - in = `"neither an array nor a map"` - out = Templates{} - err = yaml.Unmarshal([]byte(in), &out) - require.Error(t, err) - - // invalid URL - in = `- t="not a:valid url"` - out = Templates{} - err = yaml.Unmarshal([]byte(in), &out) - require.Error(t, err) -} - -func TestParseTemplateArg(t *testing.T) { - data := []struct { - ds DataSource - in string - alias string - }{ - {in: "t=foo/bar/helloworld.tmpl", alias: "t", ds: DataSource{URL: mustURL("foo/bar/helloworld.tmpl")}}, - {in: "templatedir/", alias: "templatedir/", ds: DataSource{URL: mustURL("templatedir/")}}, - {in: "dir=foo/bar/", alias: "dir", ds: DataSource{URL: mustURL("foo/bar/")}}, - {in: "mytemplate.t", alias: "mytemplate.t", ds: DataSource{URL: mustURL("mytemplate.t")}}, - { - in: "remote=https://example.com/foo/bar/helloworld.tmpl", - alias: "remote", ds: DataSource{URL: mustURL("https://example.com/foo/bar/helloworld.tmpl")}, - }, - } - - for _, d := range data { - alias, ds, err := parseTemplateArg(d.in) - require.NoError(t, err) - assert.Equal(t, d.alias, alias) - assert.EqualValues(t, d.ds, ds) - } -} diff --git a/internal/datafs/context.go b/internal/datafs/context.go index 30722f2b..310d4445 100644 --- a/internal/datafs/context.go +++ b/internal/datafs/context.go @@ -30,10 +30,15 @@ func WithDataSourceRegistryFS(registry Registry, fsys fs.FS) fs.FS { type stdinCtxKey struct{} +// ContextWithStdin injects an [io.Reader] into the context, which can be used +// to override the default stdin. func ContextWithStdin(ctx context.Context, r io.Reader) context.Context { return context.WithValue(ctx, stdinCtxKey{}, r) } +// StdinFromContext returns the io.Reader that should be used for stdin as +// injected by [ContextWithStdin]. If no reader has been injected, [os.Stdin] is +// returned. func StdinFromContext(ctx context.Context) io.Reader { if r, ok := ctx.Value(stdinCtxKey{}).(io.Reader); ok { return r diff --git a/plugins.go b/plugins.go index 7509b97c..fd2882ab 100644 --- a/plugins.go +++ b/plugins.go @@ -14,14 +14,13 @@ import ( "time" "github.com/hairyhenderson/gomplate/v4/conv" - "github.com/hairyhenderson/gomplate/v4/internal/config" ) // bindPlugins creates custom plugin functions for each plugin specified by // the config, and adds them to the given funcMap. Uses the configuration's // PluginTimeout as the default plugin Timeout. Errors if a function name is // duplicated. 
-func bindPlugins(ctx context.Context, cfg *config.Config, funcMap template.FuncMap) error { +func bindPlugins(ctx context.Context, cfg *Config, funcMap template.FuncMap) error { for k, v := range cfg.Plugins { if _, ok := funcMap[k]; ok { return fmt.Errorf("function %q is already bound, and can not be overridden", k) diff --git a/plugins_test.go b/plugins_test.go index b3257859..e919e684 100644 --- a/plugins_test.go +++ b/plugins_test.go @@ -10,7 +10,6 @@ import ( "text/template" "time" - "github.com/hairyhenderson/gomplate/v4/internal/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -18,14 +17,14 @@ import ( func TestBindPlugins(t *testing.T) { ctx := context.Background() fm := template.FuncMap{} - cfg := &config.Config{ - Plugins: map[string]config.PluginConfig{}, + cfg := &Config{ + Plugins: map[string]PluginConfig{}, } err := bindPlugins(ctx, cfg, fm) require.NoError(t, err) assert.EqualValues(t, template.FuncMap{}, fm) - cfg.Plugins = map[string]config.PluginConfig{"foo": {Cmd: "bar"}} + cfg.Plugins = map[string]PluginConfig{"foo": {Cmd: "bar"}} err = bindPlugins(ctx, cfg, fm) require.NoError(t, err) assert.Contains(t, fm, "foo") diff --git a/render.go b/render.go index cf55e06e..d5e74be7 100644 --- a/render.go +++ b/render.go @@ -6,7 +6,6 @@ import ( "io" "io/fs" "net/http" - "net/url" "path" "slices" "strings" @@ -16,27 +15,21 @@ import ( "github.com/hairyhenderson/go-fsimpl" "github.com/hairyhenderson/go-fsimpl/autofs" - "github.com/hairyhenderson/gomplate/v4/internal/config" "github.com/hairyhenderson/gomplate/v4/internal/datafs" "github.com/hairyhenderson/gomplate/v4/internal/funcs" ) -// Options for template rendering. -// -// Experimental: subject to breaking changes before the next major release -type Options struct { - // FSProvider - allows lookups of data source filesystems. Defaults to - // [DefaultFSProvider]. - FSProvider fsimpl.FSProvider - +// RenderOptions - options for controlling how templates are rendered, and +// what data are available. +type RenderOptions struct { // Datasources - map of datasources to be read on demand when the // 'datasource'/'ds'/'include' functions are used. - Datasources map[string]Datasource + Datasources map[string]DataSource // Context - map of datasources to be read immediately and added to the // template's context - Context map[string]Datasource + Context map[string]DataSource // Templates - map of templates that can be referenced as nested templates - Templates map[string]Datasource + Templates map[string]DataSource // Extra HTTP headers not attached to pre-defined datsources. Potentially // used by datasources defined in the template. @@ -55,62 +48,27 @@ type Options struct { // MissingKey controls the behavior during execution if a map is indexed with a key that is not present in the map MissingKey string - - // Experimental - enable experimental features - Experimental bool } -// optionsFromConfig - create a set of options from the internal config struct. +// optionsFromConfig - translate the internal config struct to a RenderOptions. // Does not set the Funcs field. 
-func optionsFromConfig(cfg *config.Config) Options { - ds := make(map[string]Datasource, len(cfg.DataSources)) - for k, v := range cfg.DataSources { - ds[k] = Datasource{ - URL: v.URL, - Header: v.Header, - } - } - cs := make(map[string]Datasource, len(cfg.Context)) - for k, v := range cfg.Context { - cs[k] = Datasource{ - URL: v.URL, - Header: v.Header, - } - } - ts := make(map[string]Datasource, len(cfg.Templates)) - for k, v := range cfg.Templates { - ts[k] = Datasource{ - URL: v.URL, - Header: v.Header, - } - } - - opts := Options{ - Datasources: ds, - Context: cs, - Templates: ts, +func optionsFromConfig(cfg *Config) RenderOptions { + opts := RenderOptions{ + Datasources: cfg.DataSources, + Context: cfg.Context, + Templates: cfg.Templates, ExtraHeaders: cfg.ExtraHeaders, LDelim: cfg.LDelim, RDelim: cfg.RDelim, MissingKey: cfg.MissingKey, - Experimental: cfg.Experimental, } return opts } -// Datasource - a datasource URL with optional headers -// -// Experimental: subject to breaking changes before the next major release -type Datasource struct { - URL *url.URL - Header http.Header -} - type renderer struct { sr datafs.DataSourceReader - fsp fsimpl.FSProvider - nested config.Templates + nested map[string]DataSource funcs template.FuncMap lDelim string rDelim string @@ -120,8 +78,6 @@ type renderer struct { // Renderer provides gomplate's core template rendering functionality. // See [NewRenderer]. -// -// Experimental: subject to breaking changes before the next major release type Renderer interface { // RenderTemplates renders a list of templates, parsing each template's // Text and executing it, outputting to its Writer. If a template's Writer @@ -140,11 +96,11 @@ type Renderer interface { // use. // // Experimental: subject to breaking changes before the next major release -func NewRenderer(opts Options) Renderer { +func NewRenderer(opts RenderOptions) Renderer { return newRenderer(opts) } -func newRenderer(opts Options) *renderer { +func newRenderer(opts RenderOptions) *renderer { if Metrics == nil { Metrics = newMetrics() } @@ -156,23 +112,23 @@ func newRenderer(opts Options) *renderer { for alias, ds := range opts.Context { tctxAliases = append(tctxAliases, alias) - reg.Register(alias, config.DataSource{ + reg.Register(alias, DataSource{ URL: ds.URL, Header: ds.Header, }) } for alias, ds := range opts.Datasources { - reg.Register(alias, config.DataSource{ + reg.Register(alias, DataSource{ URL: ds.URL, Header: ds.Header, }) } - // convert the internal config.Templates to a map[string]Datasource - // TODO: simplify when config.Templates is removed - nested := config.Templates{} + // convert the internal Templates to a map[string]Datasource + // TODO: simplify when Templates is removed + nested := map[string]DataSource{} for alias, ds := range opts.Templates { - nested[alias] = config.DataSource{ + nested[alias] = DataSource{ URL: ds.URL, Header: ds.Header, } @@ -191,28 +147,20 @@ func newRenderer(opts Options) *renderer { missingKey = "error" } - if opts.FSProvider == nil { - opts.FSProvider = DefaultFSProvider - } - - // TODO: move this in? 
sr := datafs.NewSourceReader(reg) return &renderer{ - nested: nested, + nested: opts.Templates, sr: sr, funcs: opts.Funcs, tctxAliases: tctxAliases, lDelim: opts.LDelim, rDelim: opts.RDelim, missingKey: missingKey, - fsp: opts.FSProvider, } } // Template contains the basic data needed to render a template with a Renderer -// -// Experimental: subject to breaking changes before the next major release type Template struct { // Writer is the writer to output the rendered template to. If this writer // is a non-os.Stdout io.Closer, it will be closed after the template is @@ -224,37 +172,37 @@ type Template struct { Text string } -func (t *renderer) RenderTemplates(ctx context.Context, templates []Template) error { +func (r *renderer) RenderTemplates(ctx context.Context, templates []Template) error { if datafs.FSProviderFromContext(ctx) == nil { - ctx = datafs.ContextWithFSProvider(ctx, t.fsp) + ctx = datafs.ContextWithFSProvider(ctx, DefaultFSProvider) } // configure the template context with the refreshed Data value // only done here because the data context may have changed - tmplctx, err := createTmplContext(ctx, t.tctxAliases, t.sr) + tmplctx, err := createTmplContext(ctx, r.tctxAliases, r.sr) if err != nil { return err } - return t.renderTemplatesWithData(ctx, templates, tmplctx) + return r.renderTemplatesWithData(ctx, templates, tmplctx) } -func (t *renderer) renderTemplatesWithData(ctx context.Context, templates []Template, tmplctx interface{}) error { +func (r *renderer) renderTemplatesWithData(ctx context.Context, templates []Template, tmplctx interface{}) error { // update funcs with the current context // only done here to ensure the context is properly set in func namespaces f := CreateFuncs(ctx) // add datasource funcs here because they need to share the source reader - addToMap(f, funcs.CreateDataSourceFuncs(ctx, t.sr)) + addToMap(f, funcs.CreateDataSourceFuncs(ctx, r.sr)) // add user-defined funcs last so they override the built-in funcs - addToMap(f, t.funcs) + addToMap(f, r.funcs) // track some metrics for debug output start := time.Now() defer func() { Metrics.TotalRenderDuration = time.Since(start) }() for _, template := range templates { - err := t.renderTemplate(ctx, template, f, tmplctx) + err := r.renderTemplate(ctx, template, f, tmplctx) if err != nil { return fmt.Errorf("renderTemplate: %w", err) } @@ -262,7 +210,7 @@ func (t *renderer) renderTemplatesWithData(ctx context.Context, templates []Temp return nil } -func (t *renderer) renderTemplate(ctx context.Context, template Template, f template.FuncMap, tmplctx interface{}) error { +func (r *renderer) renderTemplate(ctx context.Context, template Template, f template.FuncMap, tmplctx interface{}) error { if template.Writer != nil { if wr, ok := template.Writer.(io.Closer); ok { defer wr.Close() @@ -270,7 +218,7 @@ func (t *renderer) renderTemplate(ctx context.Context, template Template, f temp } tstart := time.Now() - tmpl, err := t.parseTemplate(ctx, template.Name, template.Text, f, tmplctx) + tmpl, err := r.parseTemplate(ctx, template.Name, template.Text, f, tmplctx) if err != nil { return fmt.Errorf("parse template %s: %w", template.Name, err) } @@ -286,17 +234,17 @@ func (t *renderer) renderTemplate(ctx context.Context, template Template, f temp return nil } -func (t *renderer) Render(ctx context.Context, name, text string, wr io.Writer) error { - return t.RenderTemplates(ctx, []Template{ +func (r *renderer) Render(ctx context.Context, name, text string, wr io.Writer) error { + return r.RenderTemplates(ctx, 
[]Template{ {Name: name, Text: text, Writer: wr}, }) } // parseTemplate - parses text as a Go template with the given name and options -func (t *renderer) parseTemplate(ctx context.Context, name, text string, funcs template.FuncMap, tmplctx interface{}) (tmpl *template.Template, err error) { +func (r *renderer) parseTemplate(ctx context.Context, name, text string, funcs template.FuncMap, tmplctx interface{}) (tmpl *template.Template, err error) { tmpl = template.New(name) - missingKey := t.missingKey + missingKey := r.missingKey if missingKey == "" { missingKey = "error" } @@ -313,13 +261,13 @@ func (t *renderer) parseTemplate(ctx context.Context, name, text string, funcs t // the "tmpl" funcs get added here because they need access to the root template and context addTmplFuncs(funcMap, tmpl, tmplctx, name) tmpl.Funcs(funcMap) - tmpl.Delims(t.lDelim, t.rDelim) + tmpl.Delims(r.lDelim, r.rDelim) _, err = tmpl.Parse(text) if err != nil { return nil, err } - err = t.parseNestedTemplates(ctx, tmpl) + err = r.parseNestedTemplates(ctx, tmpl) if err != nil { return nil, fmt.Errorf("parse nested templates: %w", err) } @@ -327,10 +275,10 @@ func (t *renderer) parseTemplate(ctx context.Context, name, text string, funcs t return tmpl, nil } -func (t *renderer) parseNestedTemplates(ctx context.Context, tmpl *template.Template) error { +func (r *renderer) parseNestedTemplates(ctx context.Context, tmpl *template.Template) error { fsp := datafs.FSProviderFromContext(ctx) - for alias, n := range t.nested { + for alias, n := range r.nested { u := *n.URL fname := path.Base(u.Path) @@ -361,7 +309,7 @@ func (t *renderer) parseNestedTemplates(ctx context.Context, tmpl *template.Temp // inject context & header in case they're useful... fsys = fsimpl.WithContextFS(ctx, fsys) fsys = fsimpl.WithHeaderFS(n.Header, fsys) - fsys = datafs.WithDataSourceRegistryFS(t.sr, fsys) + fsys = datafs.WithDataSourceRegistryFS(r.sr, fsys) // valid fs.FS paths have no trailing slash fname = strings.TrimRight(fname, "/") diff --git a/render_test.go b/render_test.go index 1a4d0016..936e0b01 100644 --- a/render_test.go +++ b/render_test.go @@ -31,7 +31,7 @@ func TestRenderTemplate(t *testing.T) { ctx := datafs.ContextWithFSProvider(context.Background(), fsp) // no options - built-in function - tr := NewRenderer(Options{}) + tr := NewRenderer(RenderOptions{}) out := &bytes.Buffer{} err := tr.Render(ctx, "test", "{{ `hello world` | toUpper }}", out) require.NoError(t, err) @@ -43,11 +43,11 @@ func TestRenderTemplate(t *testing.T) { t.Setenv("WORLD", "world") - tr = NewRenderer(Options{ - Context: map[string]Datasource{ + tr = NewRenderer(RenderOptions{ + Context: map[string]DataSource{ "hi": {URL: hu}, }, - Datasources: map[string]Datasource{ + Datasources: map[string]DataSource{ "world": {URL: wu}, }, }) @@ -62,8 +62,8 @@ func TestRenderTemplate(t *testing.T) { fsys["nested.tmpl"] = &fstest.MapFile{Data: []byte( `<< . 
| toUpper >>`)}
-	tr = NewRenderer(Options{
-		Templates: map[string]Datasource{
+	tr = NewRenderer(RenderOptions{
+		Templates: map[string]DataSource{
 			"nested": {URL: nu},
 		},
 		LDelim: "<<",
@@ -75,7 +75,7 @@ func TestRenderTemplate(t *testing.T) {
 	assert.Equal(t, "HELLO", out.String())
 
 	// errors contain the template name
-	tr = NewRenderer(Options{})
+	tr = NewRenderer(RenderOptions{})
 	err = tr.Render(ctx, "foo", `{{ bogus }}`, &bytes.Buffer{})
 	assert.ErrorContains(t, err, "template: foo:")
 }
@@ -86,7 +86,7 @@ func ExampleRenderer() {
 	ctx := context.Background()
 
 	// create a new template renderer
-	tr := NewRenderer(Options{})
+	tr := NewRenderer(RenderOptions{})
 
 	// render a template to stdout
 	err := tr.Render(ctx, "mytemplate",
@@ -104,7 +104,7 @@ func ExampleRenderer_manyTemplates() {
 	ctx := context.Background()
 
 	// create a new template renderer
-	tr := NewRenderer(Options{})
+	tr := NewRenderer(RenderOptions{})
 
 	templates := []Template{
 		{
@@ -145,8 +145,8 @@ func ExampleRenderer_datasources() {
 	// a datasource that retrieves JSON from a public API
 	u, _ := url.Parse("https://ipinfo.io/1.1.1.1")
 
-	tr := NewRenderer(Options{
-		Context: map[string]Datasource{
+	tr := NewRenderer(RenderOptions{
+		Context: map[string]DataSource{
 			"info": {URL: u},
 		},
 	})
diff --git a/template.go b/template.go
index bb8976cf..2f6aaae4 100644
--- a/template.go
+++ b/template.go
@@ -11,7 +11,6 @@ import (
 	"text/template"
 
 	"github.com/hack-pad/hackpadfs"
-	"github.com/hairyhenderson/gomplate/v4/internal/config"
 	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
 	"github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
 	"github.com/hairyhenderson/gomplate/v4/tmpl"
@@ -45,9 +44,7 @@ func copyFuncMap(funcMap template.FuncMap) template.FuncMap {
 }
 
 // gatherTemplates - gather and prepare templates for rendering
-//
-//nolint:gocyclo
-func gatherTemplates(ctx context.Context, cfg *config.Config, outFileNamer func(context.Context, string) (string, error)) ([]Template, error) {
+func gatherTemplates(ctx context.Context, cfg *Config, outFileNamer outputNamer) ([]Template, error) {
 	mode, modeOverride, err := cfg.GetMode()
 	if err != nil {
 		return nil, err
@@ -56,7 +53,6 @@ func gatherTemplates(ctx context.Context, cfg *config.Config, outFileNamer func(
 	var templates []Template
 
 	switch {
-	// the arg-provided input string gets a special name
 	case cfg.Input != "":
 		// open the output file - no need to close it, as it will be closed by the
 		// caller later
@@ -66,6 +62,7 @@
 		}
 
 		templates = []Template{{
+			// the arg-provided input string gets a special name
 			Name:   "",
 			Text:   cfg.Input,
 			Writer: target,
@@ -76,7 +73,7 @@
 		if err != nil {
 			return nil, fmt.Errorf("walkDir: %w", err)
 		}
-	case cfg.Input == "":
+	case len(cfg.InputFiles) > 0:
 		templates = make([]Template, len(cfg.InputFiles))
 		for i, f := range cfg.InputFiles {
 			templates[i], err = fileToTemplate(ctx, cfg, f, cfg.OutputFiles[i], mode, modeOverride)
@@ -92,7 +89,7 @@
 // walkDir - given an input dir `dir` and an output dir `outDir`, and a list
 // of .gomplateignore and exclude globs (if any), walk the input directory and create a list of
 // tplate objects, and an error, if any.
-func walkDir(ctx context.Context, cfg *config.Config, dir string, outFileNamer func(context.Context, string) (string, error), excludeGlob []string, excludeProcessingGlob []string, mode os.FileMode, modeOverride bool) ([]Template, error) {
+func walkDir(ctx context.Context, cfg *Config, dir string, outFileNamer outputNamer, excludeGlob []string, excludeProcessingGlob []string, mode os.FileMode, modeOverride bool) ([]Template, error) {
 	dir = filepath.ToSlash(filepath.Clean(dir))
 
 	// get a filesystem rooted in the same volume as dir (or / on non-Windows)
@@ -157,7 +154,7 @@ func walkDir(ctx context.Context, cfg *config.Config, dir string, outFileNamer f
 		inPath = filepath.ToSlash(inPath)
 
 		// but outFileNamer expects only the filename itself
-		outFile, err := outFileNamer(ctx, file)
+		outFile, err := outFileNamer.Name(ctx, file)
 		if err != nil {
 			return nil, fmt.Errorf("outFileNamer: %w", err)
 		}
@@ -192,13 +189,14 @@ func walkDir(ctx context.Context, cfg *config.Config, dir string, outFileNamer f
 	return templates, nil
 }
 
-func readInFile(ctx context.Context, cfg *config.Config, inFile string, mode os.FileMode) (source string, newmode os.FileMode, err error) {
+func readInFile(ctx context.Context, inFile string, mode os.FileMode) (source string, newmode os.FileMode, err error) {
 	newmode = mode
 	var b []byte
 
 	//nolint:nestif
 	if inFile == "-" {
-		b, err = io.ReadAll(cfg.Stdin)
+		stdin := datafs.StdinFromContext(ctx)
+		b, err = io.ReadAll(stdin)
 		if err != nil {
 			return source, newmode, fmt.Errorf("read from stdin: %w", err)
 		}
@@ -232,7 +230,7 @@ func readInFile(ctx context.Context, cfg *config.Config, inFile string, mode os.
 	return source, newmode, err
 }
 
-func getOutfileHandler(ctx context.Context, cfg *config.Config, outFile string, mode os.FileMode, modeOverride bool) (io.Writer, error) {
+func getOutfileHandler(ctx context.Context, cfg *Config, outFile string, mode os.FileMode, modeOverride bool) (io.Writer, error) {
 	// open the output file - no need to close it, as it will be closed by the
 	// caller later
 	target, err := openOutFile(ctx, outFile, 0o755, mode, modeOverride, cfg.Stdout)
@@ -243,8 +241,8 @@ func getOutfileHandler(ctx context.Context, cfg *config.Config, outFile string,
 	return target, nil
 }
 
-func copyFileToOutDir(ctx context.Context, cfg *config.Config, inFile, outFile string, mode os.FileMode, modeOverride bool) error {
-	sourceStr, newmode, err := readInFile(ctx, cfg, inFile, mode)
+func copyFileToOutDir(ctx context.Context, cfg *Config, inFile, outFile string, mode os.FileMode, modeOverride bool) error {
+	sourceStr, newmode, err := readInFile(ctx, inFile, mode)
 	if err != nil {
 		return err
 	}
@@ -263,8 +261,8 @@ func copyFileToOutDir(ctx context.Context, cfg *config.Config, inFile, outFile s
 	return err
 }
 
-func fileToTemplate(ctx context.Context, cfg *config.Config, inFile, outFile string, mode os.FileMode, modeOverride bool) (Template, error) {
-	source, newmode, err := readInFile(ctx, cfg, inFile, mode)
+func fileToTemplate(ctx context.Context, cfg *Config, inFile, outFile string, mode os.FileMode, modeOverride bool) (Template, error) {
+	source, newmode, err := readInFile(ctx, inFile, mode)
 	if err != nil {
 		return Template{}, err
 	}
diff --git a/template_test.go b/template_test.go
index c9d716ad..1ae614b1 100644
--- a/template_test.go
+++ b/template_test.go
@@ -13,7 +12,6 @@ import (
 
 	"github.com/hack-pad/hackpadfs"
 	"github.com/hack-pad/hackpadfs/mem"
-	"github.com/hairyhenderson/gomplate/v4/internal/config"
 	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
 	"github.com/hairyhenderson/gomplate/v4/internal/iohelpers"
 
@@ -72,7 +71,7 @@ func TestGatherTemplates(t *testing.T) {
 	ctx := datafs.ContextWithFSProvider(context.Background(),
 		datafs.WrappedFSProvider(fsys, "file"))
 
-	cfg := &config.Config{
+	cfg := &Config{
 		Stdin:  &bytes.Buffer{},
 		Stdout: &bytes.Buffer{},
 	}
@@ -82,7 +81,7 @@ func TestGatherTemplates(t *testing.T) {
 	assert.Len(t, templates, 1)
 
 	buf := &bytes.Buffer{}
-	cfg = &config.Config{
+	cfg = &Config{
 		Input:  "foo",
 		Stdout: buf,
 	}
@@ -96,7 +95,7 @@ func TestGatherTemplates(t *testing.T) {
 	require.NoError(t, err)
 	assert.Equal(t, "hello world", buf.String())
 
-	templates, err = gatherTemplates(ctx, &config.Config{
+	templates, err = gatherTemplates(ctx, &Config{
 		Input:       "foo",
 		OutputFiles: []string{"out"},
 	}, nil)
@@ -116,7 +115,7 @@ func TestGatherTemplates(t *testing.T) {
 	_ = hackpadfs.Remove(fsys, "out")
 
 	buf = &bytes.Buffer{}
-	cfg = &config.Config{
+	cfg = &Config{
 		InputFiles:  []string{"foo"},
 		OutputFiles: []string{"out"},
 		Stdout:      buf,
@@ -137,7 +136,7 @@ func TestGatherTemplates(t *testing.T) {
 	hackpadfs.Remove(fsys, "out")
 
 	buf = &bytes.Buffer{}
-	cfg = &config.Config{
+	cfg = &Config{
 		InputFiles:  []string{"foo"},
 		OutputFiles: []string{"out"},
 		OutMode:     "755",
@@ -158,7 +157,7 @@ func TestGatherTemplates(t *testing.T) {
 	assert.Equal(t, iohelpers.NormalizeFileMode(0o755), info.Mode())
 	hackpadfs.Remove(fsys, "out")
 
-	templates, err = gatherTemplates(ctx, &config.Config{
+	templates, err = gatherTemplates(ctx, &Config{
 		InputDir:  "in",
 		OutputDir: "out",
 	}, simpleNamer("out"))
@@ -195,7 +194,7 @@ func TestParseNestedTemplates(t *testing.T) {
 	// simple test with single template
 	u, _ := url.Parse("foo.t")
 
-	nested := config.Templates{"foo": {URL: u}}
+	nested := map[string]DataSource{"foo": {URL: u}}
 
 	tmpl, _ := template.New("root").Parse(`{{ template "foo" }}`)
 
@@ -215,7 +214,7 @@ func TestParseNestedTemplates(t *testing.T) {
 	fsys["dir/bar.t"] = &fstest.MapFile{Data: []byte("bar"), Mode: 0o600}
 
 	u, _ = url.Parse("dir/")
-	nested["dir"] = config.DataSource{URL: u}
+	nested["dir"] = DataSource{URL: u}
 
 	tmpl, _ = template.New("root").Parse(`{{ template "dir/foo.t" }} {{ template "dir/bar.t" }}`)
 
diff --git a/template_unix_test.go b/template_unix_test.go
index db6fd259..c914f683 100644
--- a/template_unix_test.go
+++ b/template_unix_test.go
@@ -8,7 +8,6 @@ import (
 
 	"github.com/hack-pad/hackpadfs"
 	"github.com/hack-pad/hackpadfs/mem"
-	"github.com/hairyhenderson/gomplate/v4/internal/config"
 	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
 
 	"github.com/stretchr/testify/assert"
@@ -21,7 +20,7 @@ func TestWalkDir_UNIX(t *testing.T) {
 	ctx := datafs.ContextWithFSProvider(context.Background(),
 		datafs.WrappedFSProvider(fsys, "file"))
 
-	cfg := &config.Config{}
+	cfg := &Config{}
 
 	_, err := walkDir(ctx, cfg, "/indir", simpleNamer("/outdir"), nil, nil, 0, false)
 	require.Error(t, err)
diff --git a/template_windows_test.go b/template_windows_test.go
index 4deba7d9..590e7969 100644
--- a/template_windows_test.go
+++ b/template_windows_test.go
@@ -11,7 +11,6 @@ import (
 
 	"github.com/hack-pad/hackpadfs"
 	"github.com/hack-pad/hackpadfs/mem"
-	"github.com/hairyhenderson/gomplate/v4/internal/config"
 	"github.com/hairyhenderson/gomplate/v4/internal/datafs"
 
 	"github.com/stretchr/testify/assert"
@@ -30,7 +29,7 @@ func TestWalkDir_Windows(t *testing.T) {
 	ctx := datafs.ContextWithFSProvider(context.Background(),
 		datafs.WrappedFSProvider(fsys, "file"))
 
-	cfg := &config.Config{}
+	cfg := &Config{}
 	_, err := walkDir(ctx, cfg, `C:\indir`, simpleNamer(`C:/outdir`), nil, nil,
 		0, false)
 	require.Error(t, err)
-- 
cgit v1.2.3
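
For library callers, the renames exercised in the render_test.go hunks above (Options becomes RenderOptions, Datasource becomes DataSource) look roughly like the following. This is a minimal sketch rather than part of the patch: it assumes only the exported names shown in the test diffs (NewRenderer, RenderOptions, DataSource, Render) and the v4 module path, and the template body and the "country" field are made up for illustration.

package main

import (
	"context"
	"net/url"
	"os"

	"github.com/hairyhenderson/gomplate/v4"
)

func main() {
	// a datasource exposed to templates via the context, as in
	// ExampleRenderer_datasources above
	u, _ := url.Parse("https://ipinfo.io/1.1.1.1")

	// RenderOptions replaces the old Options type, and the context map now
	// holds DataSource (not Datasource) values
	tr := gomplate.NewRenderer(gomplate.RenderOptions{
		Context: map[string]gomplate.DataSource{
			"info": {URL: u},
		},
	})

	// render a single named template to stdout; the template body is
	// illustrative only
	err := tr.Render(context.Background(), "mytemplate",
		`country: {{ .info.country }}`, os.Stdout)
	if err != nil {
		panic(err)
	}
}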
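
Two internal changes in template.go are easy to miss: gatherTemplates and walkDir now accept an outputNamer value instead of a bare func (hence the outFileNamer.Name(ctx, file) call), and readInFile no longer takes the config at all, pulling stdin from the context via datafs.StdinFromContext. The sketch below is a hypothetical reading of what the interface implies; the real declarations live elsewhere in this patch and in internal/datafs and may differ in detail.

package gomplate

import (
	"context"
	"path"
)

// outputNamer, as implied by the outFileNamer.Name(ctx, file) call above:
// something that maps an input file name to an output path. Hypothetical
// sketch - the actual definition is not shown in these hunks.
type outputNamer interface {
	Name(ctx context.Context, inPath string) (string, error)
}

// dirNamer is a made-up trivial implementation in the spirit of the
// simpleNamer helper the tests use: join every input path onto a fixed
// output directory.
type dirNamer struct{ outDir string }

func (d dirNamer) Name(_ context.Context, inPath string) (string, error) {
	return path.Join(d.outDir, inPath), nil
}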