From 9fe52ad75fe55222183aba42b4c7895579dc65b1 Mon Sep 17 00:00:00 2001 From: Ben Meier Date: Sun, 10 Mar 2024 01:55:02 +0000 Subject: [PATCH] feat: added generate command and example provisioners Signed-off-by: Ben Meier --- internal/command/default.provisioners.yaml | 1 + internal/command/generate.go | 280 +++++++++++++++++++++ internal/command/init.go | 20 +- internal/command/override_utils.go | 106 ++++++++ internal/command/override_utils_test.go | 117 +++++++++ internal/project/project.go | 12 +- internal/provisioners/loader/load.go | 5 + 7 files changed, 529 insertions(+), 12 deletions(-) create mode 100644 internal/command/default.provisioners.yaml create mode 100644 internal/command/generate.go create mode 100644 internal/command/override_utils.go create mode 100644 internal/command/override_utils_test.go diff --git a/internal/command/default.provisioners.yaml b/internal/command/default.provisioners.yaml new file mode 100644 index 0000000..0637a08 --- /dev/null +++ b/internal/command/default.provisioners.yaml @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/internal/command/generate.go b/internal/command/generate.go new file mode 100644 index 0000000..3f45e62 --- /dev/null +++ b/internal/command/generate.go @@ -0,0 +1,280 @@ +package command + +import ( + "context" + "fmt" + "log/slog" + "os" + "slices" + "strings" + + "github.com/compose-spec/compose-go/v2/types" + "github.com/imdario/mergo" + "github.com/score-spec/score-go/loader" + "github.com/score-spec/score-go/schema" + score "github.com/score-spec/score-go/types" + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" + + "github.com/score-spec/score-compose/internal/compose" + "github.com/score-spec/score-compose/internal/project" + "github.com/score-spec/score-compose/internal/provisioners" + provloader "github.com/score-spec/score-compose/internal/provisioners/loader" +) + +const ( + generateCmdOverridesFileFlag = "overrides-file" + generateCmdOverridePropertyFlag = "override-property" +) + +var generateCommand = &cobra.Command{ + Use: "generate", + Args: cobra.ArbitraryArgs, + Short: "Convert one or more Score files into a Docker compose manifest", + Long: `The generate command will convert Score files in the current Score compose project into a combined Docker compose +manifest. All resources and links between Workloads will be resolved and provisioned as required. + +By default this command looks for score.yaml in the current directory, but can take explicit file names as positional +arguments. + +"score-compose init" MUST be run first. An error will be thrown if the project directory is not present. 
+`, + Example: ` + # Use default values + score-compose generate + + # Specify Score files + score-compose generate score.yaml *.score.yaml + + # Provide overrides when one score file is provided + score-compose generate score.yaml --override-file=./overrides.score.yaml --override-property=metadata.key=value + + # Provide overrides when more than one score file is provided + score-compose generate score.yaml score-two.yaml --override-file=my-workload=./overrides.score.yaml --override-property=my-other-workload=metadata.key=value`, + + // don't print the errors - we print these ourselves in main() + SilenceErrors: true, + + RunE: func(cmd *cobra.Command, args []string) error { + cmd.SilenceUsage = true + + // find the input score files + inputFiles := []string{scoreFileDefault} + if len(args) > 0 { + inputFiles = args + } + slices.Sort(inputFiles) + slog.Debug("Input Score files", "files", inputFiles) + + // first load all the score files, parse them with a dummy yaml decoder to find the workload name, reject any + // with invalid or duplicate names. + workloadNames, workloadSpecs, err := loadRawScoreFiles(inputFiles) + if err != nil { + return err + } + slog.Debug("Input Workload names", "names", workloadNames) + if len(workloadNames) == 0 { + return fmt.Errorf("at least one Score file must be provided") + } + + // Now read and apply any overrides files to the score files + if v, _ := cmd.Flags().GetString(generateCmdOverridesFileFlag); v != "" { + if len(workloadNames) > 1 { + return fmt.Errorf("--%s cannot be used when multiple score files are provided", generateCmdOverridesFileFlag) + } + if err := parseAndApplyOverrideFile(v, generateCmdOverridesFileFlag, workloadSpecs[workloadNames[0]]); err != nil { + return err + } + } + + // Now read, parse, and apply any override properties to the score files + if v, _ := cmd.Flags().GetStringArray(generateCmdOverridePropertyFlag); len(v) > 0 { + for _, overridePropertyEntry := range v { + if err := parseAndApplyOverrideProperty(overridePropertyEntry, generateCmdOverridesFileFlag, workloadSpecs[workloadNames[0]]); err != nil { + return err + } + } + } + + sd, ok, err := project.LoadStateDirectory(".") + if err != nil { + return fmt.Errorf("failed to load existing state directory: %w", err) + } else if !ok { + return fmt.Errorf("state directory does not exist, please run \"score-compose init\" first") + } + slog.Info(fmt.Sprintf("Loaded state directory with docker compose project '%s'", sd.State.ComposeProjectName)) + currentState := &sd.State + + // Now validate with score spec + for workloadName, spec := range workloadSpecs { + // Ensure transforms are applied (be a good citizen) + if changes, err := schema.ApplyCommonUpgradeTransforms(spec); err != nil { + return fmt.Errorf("failed to upgrade spec: %w", err) + } else if len(changes) > 0 { + for _, change := range changes { + slog.Info(fmt.Sprintf("Applying backwards compatible upgrade to '%s': %s", workloadName, change)) + } + } + if err := schema.Validate(spec); err != nil { + return fmt.Errorf("validation errors in workload '%s': %w", workloadName, err) + } + slog.Info(fmt.Sprintf("Validated workload '%s'", workloadName)) + + var out score.Workload + if err := loader.MapSpec(&out, spec); err != nil { + return fmt.Errorf("failed to convert '%s' to structure: %w", workloadName, err) + } + + currentState, err = currentState.WithWorkload(&out, nil) + if err != nil { + return fmt.Errorf("failed to add workload '%s': %w", workloadName, err) + } + } + + loadedProvisioners, err := 
provloader.LoadProvisionersFromDirectory(sd.Path, provloader.DefaultSuffix) + if err != nil { + return fmt.Errorf("failed to load provisioners: %w", err) + } else if len(loadedProvisioners) > 0 { + slog.Info(fmt.Sprintf("Successfully loaded %d resource provisioners", len(loadedProvisioners))) + } + + currentState, err = currentState.WithPrimedResources() + if err != nil { + return fmt.Errorf("failed to prime resources: %w", err) + } + + superProject := &types.Project{ + Name: sd.State.ComposeProjectName, + Services: make(types.Services, 0), + Volumes: map[string]types.VolumeConfig{}, + Networks: map[string]types.NetworkConfig{}, + } + + currentState, err = provisioners.ProvisionResources(context.Background(), currentState, loadedProvisioners, superProject) + if err != nil { + return fmt.Errorf("failed to provision: %w", err) + } else if len(currentState.Resources) > 0 { + slog.Info(fmt.Sprintf("Provisioned %d resources", len(currentState.Resources))) + } + + for _, workloadName := range workloadNames { + outputFunctions, err := currentState.GetResourceOutputForWorkload(workloadName) + if err != nil { + return err + } + + slog.Info(fmt.Sprintf("Converting workload '%s' to Docker compose", workloadName)) + spec := currentState.Workloads[workloadName].Spec + converted, err := compose.ConvertSpec(&spec, outputFunctions) + if err != nil { + return fmt.Errorf("failed to convert workload '%s' to Docker compose: %w", workloadName, err) + } + + for serviceName, service := range converted.Services { + if _, ok := superProject.Services[serviceName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicate service name '%s'", workloadName, serviceName) + } + superProject.Services[serviceName] = service + } + for volumeName, volume := range converted.Volumes { + if _, ok := superProject.Volumes[volumeName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicate volume name '%s'", workloadName, volumeName) + } + superProject.Volumes[volumeName] = volume + } + for networkName, network := range converted.Networks { + if _, ok := superProject.Networks[networkName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicated network name '%s'", workloadName, networkName) + } + superProject.Networks[networkName] = network + } + } + + raw, _ := yaml.Marshal(superProject) + + v, _ := cmd.Flags().GetString("output") + if v == "" { + return fmt.Errorf("no output file specified") + } else if v == "-" { + _, _ = fmt.Fprint(cmd.OutOrStdout(), string(raw)) + } else if err := os.WriteFile(v+".temp", raw, 0755); err != nil { + return fmt.Errorf("failed to write output file: %w", err) + } else if err := os.Rename(v+".temp", v); err != nil { + return fmt.Errorf("failed to complete writing output file: %w", err) + } + return nil + }, +} + +// loadRawScoreFiles loads raw score specs as yaml from the given files and finds all the workload names. It throws +// errors if it failed to read, load, or if names are duplicated. 
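+// The returned slice preserves the order in which the files were read, and the map keys each raw decoded
+// document by its workload name (metadata.name).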
+func loadRawScoreFiles(fileNames []string) ([]string, map[string]map[string]interface{}, error) { + workloadNames := make([]string, 0, len(fileNames)) + workloadToRawScore := make(map[string]map[string]interface{}, len(fileNames)) + + for _, fileName := range fileNames { + var out map[string]interface{} + raw, err := os.ReadFile(fileName) + if err != nil { + return nil, nil, fmt.Errorf("failed to read '%s': %w", fileName, err) + } else if err := yaml.Unmarshal(raw, &out); err != nil { + return nil, nil, fmt.Errorf("failed to decode '%s' as yaml: %w", fileName, err) + } + + var workloadName string + if meta, ok := out["metadata"].(map[string]interface{}); ok { + workloadName, _ = meta["name"].(string) + if _, ok := workloadToRawScore[workloadName]; ok { + return nil, nil, fmt.Errorf("workload name '%s' in file '%s' is used more than once", workloadName, fileName) + } + } + workloadNames = append(workloadNames, workloadName) + workloadToRawScore[workloadName] = out + } + return workloadNames, workloadToRawScore, nil +} + +func init() { + generateCommand.Flags().StringP("output", "o", "compose.yaml", "The output file to write the composed compose file to") + generateCommand.Flags().String(generateCmdOverridesFileFlag, "", "An optional file of Score overrides to merge in") + generateCommand.Flags().StringArray(generateCmdOverridePropertyFlag, []string{}, "An optional set of path=key overrides to set or remove") + rootCmd.AddCommand(generateCommand) +} + +func parseAndApplyOverrideFile(entry string, flagName string, spec map[string]interface{}) error { + if raw, err := os.ReadFile(entry); err != nil { + return fmt.Errorf("--%s '%s' is invalid, failed to read file: %w", flagName, entry, err) + } else { + slog.Info(fmt.Sprintf("Applying overrides from %s to workload", entry)) + var out map[string]interface{} + if err := yaml.Unmarshal(raw, &out); err != nil { + return fmt.Errorf("--%s '%s' is invalid: failed to decode yaml: %w", flagName, entry, err) + } else if err := mergo.Merge(&spec, out, mergo.WithOverride); err != nil { + return fmt.Errorf("--%s '%s' failed to apply: %w", flagName, entry, err) + } + } + return nil +} + +func parseAndApplyOverrideProperty(entry string, flagName string, spec map[string]interface{}) error { + parts := strings.SplitN(entry, "=", 2) + if len(parts) != 2 { + return fmt.Errorf("--%s '%s' is invalid, expected a =-separated path and value", flagName, entry) + } + if parts[1] == "" { + slog.Info(fmt.Sprintf("Overriding '%s' in workload", parts[0])) + if err := writePathInStruct(spec, parseDotPathParts(parts[0]), true, nil); err != nil { + return fmt.Errorf("--%s '%s' could not be applied: %w", flagName, entry, err) + } + } else { + var value interface{} + if err := yaml.Unmarshal([]byte(parts[1]), &value); err != nil { + return fmt.Errorf("--%s '%s' is invalid, failed to unmarshal value as json: %w", flagName, entry, err) + } + slog.Info(fmt.Sprintf("Overriding '%s' in workload", parts[0])) + if err := writePathInStruct(spec, parseDotPathParts(parts[0]), false, value); err != nil { + return fmt.Errorf("--%s '%s' could not be applied: %w", flagName, entry, err) + } + } + return nil +} diff --git a/internal/command/init.go b/internal/command/init.go index 12353c5..dd85c38 100644 --- a/internal/command/init.go +++ b/internal/command/init.go @@ -1,6 +1,7 @@ package command import ( + _ "embed" "errors" "fmt" "log/slog" @@ -11,6 +12,7 @@ import ( "github.com/spf13/cobra" "github.com/score-spec/score-compose/internal/project" + 
"github.com/score-spec/score-compose/internal/provisioners/loader" ) const DefaultScoreFileContent = `# Score provides a developer-centric and platform-agnostic @@ -50,6 +52,9 @@ service: resources: {} ` +//go:embed default.provisioners.yaml +var defaultProvisionersContent string + var initCmd = &cobra.Command{ Use: "init", Args: cobra.NoArgs, @@ -103,7 +108,7 @@ acts as a namespace when multiple score files and containers are used. slog.Info(fmt.Sprintf("Writing new state directory '%s'", project.DefaultRelativeStateDirectory)) wd, _ := os.Getwd() - sd := &project.StateDirectory{ + sd = &project.StateDirectory{ Path: project.DefaultRelativeStateDirectory, State: project.State{ Workloads: map[string]project.ScoreWorkloadState{}, @@ -120,6 +125,12 @@ acts as a namespace when multiple score files and containers are used. if err := sd.Persist(); err != nil { return fmt.Errorf("failed to persist new compose project name: %w", err) } + + dst := "default" + loader.DefaultSuffix + slog.Info(fmt.Sprintf("Writing default provisioners yaml file '%s'", dst)) + if err := os.WriteFile(filepath.Join(sd.Path, dst), []byte(defaultProvisionersContent), 0644); err != nil { + return fmt.Errorf("failed to write provisioners: %w", err) + } } if _, err := os.ReadFile(initCmdScoreFile); err != nil { @@ -135,6 +146,13 @@ acts as a namespace when multiple score files and containers are used. } else { slog.Info(fmt.Sprintf("Found existing Score file '%s'", initCmdScoreFile)) } + + if provs, err := loader.LoadProvisionersFromDirectory(sd.Path, loader.DefaultSuffix); err != nil { + return fmt.Errorf("failed to load existing provisioners: %w", err) + } else { + slog.Debug(fmt.Sprintf("Successfully loaded %d resource provisioners", len(provs))) + } + slog.Info(fmt.Sprintf("Read more about the Score specification at https://docs.score.dev/docs/")) return nil diff --git a/internal/command/override_utils.go b/internal/command/override_utils.go new file mode 100644 index 0000000..fd832d1 --- /dev/null +++ b/internal/command/override_utils.go @@ -0,0 +1,106 @@ +package command + +import ( + "fmt" + "slices" + "strconv" + "strings" +) + +func parseDotPathParts(input string) []string { + // support escaping dot's to insert elements with a . in them. 
+ input = strings.ReplaceAll(input, "\\\\", "\x01") + input = strings.ReplaceAll(input, "\\.", "\x00") + parts := strings.Split(input, ".") + for i, part := range parts { + part = strings.ReplaceAll(part, "\x00", ".") + part = strings.ReplaceAll(part, "\x01", "\\") + parts[i] = part + } + return parts +} + +func writePathInStruct(input map[string]interface{}, path []string, isDelete bool, value interface{}) error { + if len(path) == 0 { + return fmt.Errorf("cannot change root node") + } + + // the current position in the tree + var current interface{} = input + + // a reference to the map that holds current + var parentMap map[string]interface{} + var parentKey string + + // first traverse to the right location + for _, s := range path[:len(path)-1] { + switch currentType := current.(type) { + case map[string]interface{}: + parentMap = currentType + parentKey = s + + next, ok := currentType[s] + if ok { + current = next + } else { + currentType[s] = make(map[string]interface{}) + current = currentType[s] + } + case []interface{}: + parentMap = nil + + idx, err := strconv.Atoi(s) + if err != nil { + return fmt.Errorf("cannot index '%s' in array", s) + } else if idx < 0 || idx >= len(currentType) { + return fmt.Errorf("cannot set '%s' in array: out of range", s) + } + current = currentType[idx] + default: + return fmt.Errorf("cannot lookup property or index '%s' in %T", s, currentType) + } + } + // then apply the change + + key := path[len(path)-1] + switch currentType := current.(type) { + case map[string]interface{}: + if isDelete { + delete(currentType, key) + } else { + currentType[key] = value + } + case []interface{}: + // This is where the bulk of the complexity comes from. Parsing validating and then navigating the slices. + idx, err := strconv.Atoi(key) + if err != nil { + return fmt.Errorf("cannot index '%s' in array", key) + } else if idx < -1 || idx >= len(currentType) { + return fmt.Errorf("cannot set '%s' in array: out of range", key) + } else if isDelete { + if idx == -1 { + return fmt.Errorf("cannot delete '%s' in array", key) + } else { + if parentMap != nil { + parentMap[parentKey] = slices.Delete(currentType, idx, idx+1) + } else { + return fmt.Errorf("override in nested arrays is not supported") + } + } + } else { + if idx == -1 { + if parentMap != nil { + parentMap[parentKey] = append(currentType, value) + } else { + return fmt.Errorf("override in nested arrays is not supported") + } + } else { + currentType[idx] = value + } + } + default: + return fmt.Errorf("cannot lookup property or index '%s' in %T", key, currentType) + } + + return nil +} diff --git a/internal/command/override_utils_test.go b/internal/command/override_utils_test.go new file mode 100644 index 0000000..c89109f --- /dev/null +++ b/internal/command/override_utils_test.go @@ -0,0 +1,117 @@ +package command + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseDotPathParts(t *testing.T) { + for _, tc := range []struct { + Input string + Expected []string + }{ + {"", []string{""}}, + {"a", []string{"a"}}, + {"a.b", []string{"a", "b"}}, + {"a.-1", []string{"a", "-1"}}, + {"a.b\\.c", []string{"a", "b.c"}}, + {"a.b\\\\.c", []string{"a", "b\\", "c"}}, + } { + t.Run(tc.Input, func(t *testing.T) { + assert.Equal(t, tc.Expected, parseDotPathParts(tc.Input)) + }) + } +} + +func TestWritePathInStruct(t *testing.T) { + for _, tc := range []struct { + Name string + Spec string + Path []string + Delete bool + Value interface{} + Expected string + ExpectedError 
error + }{ + { + Name: "simple object set", + Spec: `{"a":{"b":[{}]}}`, + Path: []string{"a", "b", "0", "c"}, + Value: "hello", + Expected: `{"a":{"b":[{"c":"hello"}]}}`, + }, + { + Name: "simple object delete", + Spec: `{"a":{"b":[{"c":"hello"}]}}`, + Path: []string{"a", "b", "0", "c"}, + Delete: true, + Expected: `{"a":{"b":[{}]}}`, + }, + { + Name: "simple array set", + Spec: `{"a":[{}]}`, + Path: []string{"a", "0"}, + Value: "hello", + Expected: `{"a":["hello"]}`, + }, + { + Name: "simple array append", + Spec: `{"a":["hello"]}`, + Path: []string{"a", "-1"}, + Value: "world", + Expected: `{"a":["hello","world"]}`, + }, + { + Name: "simple array delete", + Spec: `{"a":["hello", "world"]}`, + Path: []string{"a", "0"}, + Delete: true, + Expected: `{"a":["world"]}`, + }, + { + Name: "build object via path", + Spec: `{}`, + Path: []string{"a", "b"}, + Value: "hello", + Expected: `{"a":{"b":"hello"}}`, + }, + { + Name: "bad index str", + Spec: `{"a":[]}`, + Path: []string{"a", "b"}, + Value: "hello", + ExpectedError: fmt.Errorf("cannot index 'b' in array"), + }, + { + Name: "index out of range", + Spec: `{"a": [0]}`, + Path: []string{"a", "2"}, + Value: "hello", + ExpectedError: fmt.Errorf("cannot set '2' in array: out of range"), + }, + { + Name: "no append nested arrays", + Spec: `{"a":[[0]]}`, + Path: []string{"a", "0", "-1"}, + Value: "hello", + ExpectedError: fmt.Errorf("override in nested arrays is not supported"), + }, + } { + t.Run(tc.Name, func(t *testing.T) { + var inSpec map[string]interface{} + assert.NoError(t, json.Unmarshal([]byte(tc.Spec), &inSpec)) + err := writePathInStruct(inSpec, tc.Path, tc.Delete, tc.Value) + if tc.ExpectedError != nil { + assert.EqualError(t, err, tc.ExpectedError.Error()) + } else { + if assert.NoError(t, err) { + outSpec, _ := json.Marshal(inSpec) + assert.JSONEq(t, tc.Expected, string(outSpec)) + } + } + }) + } +} diff --git a/internal/project/project.go b/internal/project/project.go index aad8ac5..0aaa5a5 100644 --- a/internal/project/project.go +++ b/internal/project/project.go @@ -8,9 +8,6 @@ import ( "path/filepath" "gopkg.in/yaml.v3" - - "github.com/score-spec/score-compose/internal/provisioners" - "github.com/score-spec/score-compose/internal/provisioners/loader" ) const ( @@ -26,8 +23,6 @@ type StateDirectory struct { Path string // The current state file State State - // The set of provisioners that are available - Provisioners []provisioners.Provisioner } // Persist ensures that the directory is created and that the current config file has been written with the latest settings. @@ -75,10 +70,5 @@ func LoadStateDirectory(directory string) (*StateDirectory, bool, error) { return nil, true, fmt.Errorf("state file couldn't be decoded: %w", err) } - provs, err := loader.LoadProvisionersFromDirectory(d, ".provisioners.yaml") - if err != nil { - return nil, false, fmt.Errorf("failed to load provisioners: %w", err) - } - - return &StateDirectory{d, out, provs}, true, nil + return &StateDirectory{d, out}, true, nil } diff --git a/internal/provisioners/loader/load.go b/internal/provisioners/loader/load.go index c44d395..7b6e8ca 100644 --- a/internal/provisioners/loader/load.go +++ b/internal/provisioners/loader/load.go @@ -3,6 +3,7 @@ package loader import ( "bytes" "fmt" + "log/slog" "net/url" "os" "path/filepath" @@ -14,6 +15,8 @@ import ( "github.com/score-spec/score-compose/internal/provisioners/templateprov" ) +const DefaultSuffix = ".provisioners.yaml" + // LoadProvisioners loads a list of provisioners from the raw contents from a yaml file. 
func LoadProvisioners(raw []byte) ([]provisioners.Provisioner, error) { var intermediate []map[string]interface{} @@ -34,6 +37,7 @@ func LoadProvisioners(raw []byte) ([]provisioners.Provisioner, error) { if p, err := templateprov.Parse(m); err != nil { return nil, fmt.Errorf("%d: %s: failed to parse: %w", i, uri, err) } else { + slog.Debug(fmt.Sprintf("Loaded provisioner %s", p.Uri())) out = append(out, p) } default: @@ -45,6 +49,7 @@ func LoadProvisioners(raw []byte) ([]provisioners.Provisioner, error) { // LoadProvisionersFromDirectory loads all providers we can find in files that end in the common suffix. func LoadProvisionersFromDirectory(path string, suffix string) ([]provisioners.Provisioner, error) { + slog.Debug(fmt.Sprintf("Loading providers with suffix %s in directory '%s'", suffix, path)) items, err := os.ReadDir(path) if err != nil { return nil, err
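
A minimal end-to-end sketch of the workflow this patch enables, for illustration only: the command names and
flags are taken from the cobra definitions above, while the final docker compose invocation is an assumption
about how the generated manifest would typically be consumed rather than something this patch wires up.

    # create the project state directory and the default (empty) provisioners file
    score-compose init

    # convert Score files into compose.yaml (the default --output), provisioning declared resources via the
    # *.provisioners.yaml files found in the state directory
    score-compose generate score.yaml --override-property=metadata.key=value

    # or stream the manifest to stdout instead of writing a file
    score-compose generate score.yaml -o -

    # assumption: run the generated manifest with docker compose
    docker compose -f compose.yaml up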