diff --git a/internal/command/99-default.provisioners.yaml b/internal/command/99-default.provisioners.yaml new file mode 100644 index 0000000..9f50605 --- /dev/null +++ b/internal/command/99-default.provisioners.yaml @@ -0,0 +1,76 @@ +# The default volume provisioner provided by score-compose allows basic volume resources to be created in the resources +# system. The volume resource just creates an ephemeral Docker volume with a random string as the name, and source +# attribute that we can reference. +- uri: template://default-provisioners/volume + # By default, match all classes and ids of volume. If you want to override this, create another provisioner definition + # with a higher priority. + type: volume + init: | + randomVolumeName: {{ .Id | replace "." "-" }}-{{ randAlphaNum 6 }} + # Store the random volume name if we haven't chosen one yet, otherwise use the one that exists already + state: | + name: {{ dig "name" .Init.randomVolumeName .State }} + # Return a source value with the volume name. This can be used in volume resource references now. + outputs: | + source: {{ .State.name }} + # Add a volume to the docker compose file. We assume our name is unique here. We also apply a label to help ensure + # that we can track the volume back to the workload and resource that created it. + volumes: | + {{ .State.name }}: + name: {{ .State.name }} + driver: local + labels: + dev.score.compose.res.uid: {{ .Uid }} + +# The default redis provisioner adds a redis service to the project which returns a host, port, username, and password. +- uri: template://default-provisioners/redis + # By default, match all redis types regardless of class and id. If you want to override this, create another + # provisioner definition with a higher priority. 
+ type: redis + # Init template has the default port and a random service name and password if needed later + init: | + port: 6379 + randomServiceName: redis-{{ randAlphaNum 6 }} + randomPassword: {{ randAlphaNum 16 | quote }} + # The only state we need to persist is the chosen random service name and password + state: | + serviceName: {{ dig "serviceName" .Init.randomServiceName .State | quote }} + password: {{ dig "password" .Init.randomPassword .State | quote }} + # Return the outputs schema that consumers expect + outputs: | + host: {{ .State.serviceName }} + port: {{ .Init.port }} + username: default + password: {{ .State.password | quote }} + # write the config file to the mounts directory + files: | + {{ .State.serviceName }}/redis.conf: | + requirepass {{ .State.password }} + port {{ .Init.port }} + save 60 1 + loglevel warning + # add a volume for persistence of the redis data + volumes: | + {{ .State.serviceName }}-data: + name: {{ .State.serviceName }}-data + driver: local + labels: + dev.score.compose.res.uid: {{ .Uid }} + # And the redis service itself with volumes bound in + services: | + {{ .State.serviceName }}: + labels: + dev.score.compose.res.uid: {{ .Uid }} + image: redis:7 + entrypoint: ["redis-server"] + command: ["/usr/local/etc/redis/redis.conf"] + volumes: + - type: bind + source: {{ .MountsDirectory }}/{{ .State.serviceName }}/redis.conf + target: /usr/local/etc/redis/redis.conf + read_only: true + - type: volume + source: {{ .State.serviceName }}-data + target: /data + volume: + nocopy: true diff --git a/internal/command/generate.go b/internal/command/generate.go new file mode 100644 index 0000000..44a072b --- /dev/null +++ b/internal/command/generate.go @@ -0,0 +1,285 @@ +package command + +import ( + "context" + "fmt" + "log/slog" + "os" + "slices" + "strings" + + "github.com/compose-spec/compose-go/v2/types" + "github.com/imdario/mergo" + "github.com/score-spec/score-go/loader" + "github.com/score-spec/score-go/schema" + score 
"github.com/score-spec/score-go/types" + "github.com/spf13/cobra" + "gopkg.in/yaml.v3" + + "github.com/score-spec/score-compose/internal/compose" + "github.com/score-spec/score-compose/internal/project" + "github.com/score-spec/score-compose/internal/provisioners" + provloader "github.com/score-spec/score-compose/internal/provisioners/loader" +) + +const ( + generateCmdOverridesFileFlag = "overrides-file" + generateCmdOverridePropertyFlag = "override-property" +) + +var generateCommand = &cobra.Command{ + Use: "generate", + Args: cobra.ArbitraryArgs, + Short: "Convert one or more Score files into a Docker compose manifest", + Long: `The generate command will convert Score files in the current Score compose project into a combined Docker compose +manifest. All resources and links between Workloads will be resolved and provisioned as required. + +By default this command looks for score.yaml in the current directory, but can take explicit file names as positional +arguments. + +"score-compose init" MUST be run first. An error will be thrown if the project directory is not present. 
+`, + Example: ` + # Use default values + score-compose generate + + # Specify Score files + score-compose generate score.yaml *.score.yaml + + # Provide overrides when one score file is provided + score-compose generate score.yaml --override-file=./overrides.score.yaml --override-property=metadata.key=value + + # Provide overrides when more than one score file is provided + score-compose generate score.yaml score-two.yaml --override-file=my-workload=./overrides.score.yaml --override-property=my-other-workload=metadata.key=value`, + + // don't print the errors - we print these ourselves in main() + SilenceErrors: true, + + RunE: func(cmd *cobra.Command, args []string) error { + cmd.SilenceUsage = true + + // find the input score files + inputFiles := []string{scoreFileDefault} + if len(args) > 0 { + inputFiles = args + } + slices.Sort(inputFiles) + slog.Debug("Input Score files", "files", inputFiles) + + // first load all the score files, parse them with a dummy yaml decoder to find the workload name, reject any + // with invalid or duplicate names. 
+ workloadNames, workloadSpecs, err := loadRawScoreFiles(inputFiles) + if err != nil { + return err + } + slog.Debug("Input Workload names", "names", workloadNames) + if len(workloadNames) == 0 { + return fmt.Errorf("at least one Score file must be provided") + } + + // Now read and apply any overrides files to the score files + if v, _ := cmd.Flags().GetString(generateCmdOverridesFileFlag); v != "" { + if len(workloadNames) > 1 { + return fmt.Errorf("--%s cannot be used when multiple score files are provided", generateCmdOverridesFileFlag) + } + if err := parseAndApplyOverrideFile(v, generateCmdOverridesFileFlag, workloadSpecs[workloadNames[0]]); err != nil { + return err + } + } + + // Now read, parse, and apply any override properties to the score files + if v, _ := cmd.Flags().GetStringArray(generateCmdOverridePropertyFlag); len(v) > 0 { + for _, overridePropertyEntry := range v { + if err := parseAndApplyOverrideProperty(overridePropertyEntry, generateCmdOverridesFileFlag, workloadSpecs[workloadNames[0]]); err != nil { + return err + } + } + } + + sd, ok, err := project.LoadStateDirectory(".") + if err != nil { + return fmt.Errorf("failed to load existing state directory: %w", err) + } else if !ok { + return fmt.Errorf("state directory does not exist, please run \"score-compose init\" first") + } + slog.Info(fmt.Sprintf("Loaded state directory with docker compose project '%s'", sd.State.ComposeProjectName)) + currentState := &sd.State + + // Now validate with score spec + for workloadName, spec := range workloadSpecs { + // Ensure transforms are applied (be a good citizen) + if changes, err := schema.ApplyCommonUpgradeTransforms(spec); err != nil { + return fmt.Errorf("failed to upgrade spec: %w", err) + } else if len(changes) > 0 { + for _, change := range changes { + slog.Info(fmt.Sprintf("Applying backwards compatible upgrade to '%s': %s", workloadName, change)) + } + } + if err := schema.Validate(spec); err != nil { + return fmt.Errorf("validation errors 
in workload '%s': %w", workloadName, err) + } + slog.Info(fmt.Sprintf("Validated workload '%s'", workloadName)) + + var out score.Workload + if err := loader.MapSpec(&out, spec); err != nil { + return fmt.Errorf("failed to convert '%s' to structure: %w", workloadName, err) + } + + currentState, err = currentState.WithWorkload(&out, nil) + if err != nil { + return fmt.Errorf("failed to add workload '%s': %w", workloadName, err) + } + } + + loadedProvisioners, err := provloader.LoadProvisionersFromDirectory(sd.Path, provloader.DefaultSuffix) + if err != nil { + return fmt.Errorf("failed to load provisioners: %w", err) + } else if len(loadedProvisioners) > 0 { + slog.Info(fmt.Sprintf("Successfully loaded %d resource provisioners", len(loadedProvisioners))) + } + + currentState, err = currentState.WithPrimedResources() + if err != nil { + return fmt.Errorf("failed to prime resources: %w", err) + } + + superProject := &types.Project{ + Name: sd.State.ComposeProjectName, + Services: make(types.Services, 0), + Volumes: map[string]types.VolumeConfig{}, + Networks: map[string]types.NetworkConfig{}, + } + + currentState, err = provisioners.ProvisionResources(context.Background(), currentState, loadedProvisioners, superProject) + if err != nil { + return fmt.Errorf("failed to provision: %w", err) + } else if len(currentState.Resources) > 0 { + slog.Info(fmt.Sprintf("Provisioned %d resources", len(currentState.Resources))) + } + + for _, workloadName := range workloadNames { + outputFunctions, err := currentState.GetResourceOutputForWorkload(workloadName) + if err != nil { + return err + } + + slog.Info(fmt.Sprintf("Converting workload '%s' to Docker compose", workloadName)) + spec := currentState.Workloads[workloadName].Spec + converted, err := compose.ConvertSpec(&spec, outputFunctions) + if err != nil { + return fmt.Errorf("failed to convert workload '%s' to Docker compose: %w", workloadName, err) + } + + for serviceName, service := range converted.Services { + if _, ok := 
superProject.Services[serviceName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicate service name '%s'", workloadName, serviceName) + } + superProject.Services[serviceName] = service + } + for volumeName, volume := range converted.Volumes { + if _, ok := superProject.Volumes[volumeName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicate volume name '%s'", workloadName, volumeName) + } + superProject.Volumes[volumeName] = volume + } + for networkName, network := range converted.Networks { + if _, ok := superProject.Networks[networkName]; ok { + return fmt.Errorf("failed to add converted workload '%s': duplicated network name '%s'", workloadName, networkName) + } + superProject.Networks[networkName] = network + } + } + + sd.State = *currentState + if err := sd.Persist(); err != nil { + return fmt.Errorf("failed to persist updated state directory: %w", err) + } + + raw, _ := yaml.Marshal(superProject) + + v, _ := cmd.Flags().GetString("output") + if v == "" { + return fmt.Errorf("no output file specified") + } else if v == "-" { + _, _ = fmt.Fprint(cmd.OutOrStdout(), string(raw)) + } else if err := os.WriteFile(v+".temp", raw, 0755); err != nil { + return fmt.Errorf("failed to write output file: %w", err) + } else if err := os.Rename(v+".temp", v); err != nil { + return fmt.Errorf("failed to complete writing output file: %w", err) + } + return nil + }, +} + +// loadRawScoreFiles loads raw score specs as yaml from the given files and finds all the workload names. It throws +// errors if it failed to read, load, or if names are duplicated. 
+func loadRawScoreFiles(fileNames []string) ([]string, map[string]map[string]interface{}, error) { + workloadNames := make([]string, 0, len(fileNames)) + workloadToRawScore := make(map[string]map[string]interface{}, len(fileNames)) + + for _, fileName := range fileNames { + var out map[string]interface{} + raw, err := os.ReadFile(fileName) + if err != nil { + return nil, nil, fmt.Errorf("failed to read '%s': %w", fileName, err) + } else if err := yaml.Unmarshal(raw, &out); err != nil { + return nil, nil, fmt.Errorf("failed to decode '%s' as yaml: %w", fileName, err) + } + + var workloadName string + if meta, ok := out["metadata"].(map[string]interface{}); ok { + workloadName, _ = meta["name"].(string) + if _, ok := workloadToRawScore[workloadName]; ok { + return nil, nil, fmt.Errorf("workload name '%s' in file '%s' is used more than once", workloadName, fileName) + } + } + workloadNames = append(workloadNames, workloadName) + workloadToRawScore[workloadName] = out + } + return workloadNames, workloadToRawScore, nil +} + +func init() { + generateCommand.Flags().StringP("output", "o", "compose.yaml", "The output file to write the composed compose file to") + generateCommand.Flags().String(generateCmdOverridesFileFlag, "", "An optional file of Score overrides to merge in") + generateCommand.Flags().StringArray(generateCmdOverridePropertyFlag, []string{}, "An optional set of path=key overrides to set or remove") + rootCmd.AddCommand(generateCommand) +} + +func parseAndApplyOverrideFile(entry string, flagName string, spec map[string]interface{}) error { + if raw, err := os.ReadFile(entry); err != nil { + return fmt.Errorf("--%s '%s' is invalid, failed to read file: %w", flagName, entry, err) + } else { + slog.Info(fmt.Sprintf("Applying overrides from %s to workload", entry)) + var out map[string]interface{} + if err := yaml.Unmarshal(raw, &out); err != nil { + return fmt.Errorf("--%s '%s' is invalid: failed to decode yaml: %w", flagName, entry, err) + } else if err := 
mergo.Merge(&spec, out, mergo.WithOverride); err != nil { + return fmt.Errorf("--%s '%s' failed to apply: %w", flagName, entry, err) + } + } + return nil +} + +func parseAndApplyOverrideProperty(entry string, flagName string, spec map[string]interface{}) error { + parts := strings.SplitN(entry, "=", 2) + if len(parts) != 2 { + return fmt.Errorf("--%s '%s' is invalid, expected a =-separated path and value", flagName, entry) + } + if parts[1] == "" { + slog.Info(fmt.Sprintf("Overriding '%s' in workload", parts[0])) + if err := writePathInStruct(spec, parseDotPathParts(parts[0]), true, nil); err != nil { + return fmt.Errorf("--%s '%s' could not be applied: %w", flagName, entry, err) + } + } else { + var value interface{} + if err := yaml.Unmarshal([]byte(parts[1]), &value); err != nil { + return fmt.Errorf("--%s '%s' is invalid, failed to unmarshal value as json: %w", flagName, entry, err) + } + slog.Info(fmt.Sprintf("Overriding '%s' in workload", parts[0])) + if err := writePathInStruct(spec, parseDotPathParts(parts[0]), false, value); err != nil { + return fmt.Errorf("--%s '%s' could not be applied: %w", flagName, entry, err) + } + } + return nil +} diff --git a/internal/command/init.go b/internal/command/init.go index 12353c5..1f2f6e0 100644 --- a/internal/command/init.go +++ b/internal/command/init.go @@ -1,6 +1,7 @@ package command import ( + _ "embed" "errors" "fmt" "log/slog" @@ -11,6 +12,7 @@ import ( "github.com/spf13/cobra" "github.com/score-spec/score-compose/internal/project" + "github.com/score-spec/score-compose/internal/provisioners/loader" ) const DefaultScoreFileContent = `# Score provides a developer-centric and platform-agnostic @@ -50,6 +52,9 @@ service: resources: {} ` +//go:embed 99-default.provisioners.yaml +var defaultProvisionersContent string + var initCmd = &cobra.Command{ Use: "init", Args: cobra.NoArgs, @@ -103,7 +108,7 @@ acts as a namespace when multiple score files and containers are used. 
slog.Info(fmt.Sprintf("Writing new state directory '%s'", project.DefaultRelativeStateDirectory)) wd, _ := os.Getwd() - sd := &project.StateDirectory{ + sd = &project.StateDirectory{ Path: project.DefaultRelativeStateDirectory, State: project.State{ Workloads: map[string]project.ScoreWorkloadState{}, @@ -120,6 +125,12 @@ acts as a namespace when multiple score files and containers are used. if err := sd.Persist(); err != nil { return fmt.Errorf("failed to persist new compose project name: %w", err) } + + dst := "99-default" + loader.DefaultSuffix + slog.Info(fmt.Sprintf("Writing default provisioners yaml file '%s'", dst)) + if err := os.WriteFile(filepath.Join(sd.Path, dst), []byte(defaultProvisionersContent), 0644); err != nil { + return fmt.Errorf("failed to write provisioners: %w", err) + } } if _, err := os.ReadFile(initCmdScoreFile); err != nil { @@ -135,6 +146,13 @@ acts as a namespace when multiple score files and containers are used. } else { slog.Info(fmt.Sprintf("Found existing Score file '%s'", initCmdScoreFile)) } + + if provs, err := loader.LoadProvisionersFromDirectory(sd.Path, loader.DefaultSuffix); err != nil { + return fmt.Errorf("failed to load existing provisioners: %w", err) + } else { + slog.Debug(fmt.Sprintf("Successfully loaded %d resource provisioners", len(provs))) + } + slog.Info(fmt.Sprintf("Read more about the Score specification at https://docs.score.dev/docs/")) return nil diff --git a/internal/command/override_utils.go b/internal/command/override_utils.go new file mode 100644 index 0000000..fd832d1 --- /dev/null +++ b/internal/command/override_utils.go @@ -0,0 +1,106 @@ +package command + +import ( + "fmt" + "slices" + "strconv" + "strings" +) + +func parseDotPathParts(input string) []string { + // support escaping dot's to insert elements with a . in them. 
// writePathInStruct sets, appends, or deletes a value at the given path within a nested structure of
// maps and slices, mutating input in place. Intermediate maps are created as needed; a nil value at an
// intermediate key is treated like a missing key and replaced with a new map. Slice elements are
// addressed by decimal index, and the special index "-1" appends to the slice on a set. Appending to or
// deleting from a slice nested directly inside another slice is not supported, because the rewritten
// slice header cannot be stored back into the parent.
func writePathInStruct(input map[string]interface{}, path []string, isDelete bool, value interface{}) error {
	if len(path) == 0 {
		return fmt.Errorf("cannot change root node")
	}

	// the current position in the tree
	var current interface{} = input

	// a reference to the map that holds current, so a grown/shrunk slice can be written back after
	// append or delete; nil when current was reached through a slice index.
	var parentMap map[string]interface{}
	var parentKey string

	// first traverse to the right location
	for _, s := range path[:len(path)-1] {
		switch currentType := current.(type) {
		case map[string]interface{}:
			parentMap = currentType
			parentKey = s

			// BUGFIX: a key holding an explicit nil (e.g. "a:" in yaml) previously became the new
			// current node and failed later with "cannot lookup property or index ... in <nil>".
			// Treat nil the same as a missing key and build a map in its place.
			if next, ok := currentType[s]; ok && next != nil {
				current = next
			} else {
				currentType[s] = make(map[string]interface{})
				current = currentType[s]
			}
		case []interface{}:
			parentMap = nil

			idx, err := strconv.Atoi(s)
			if err != nil {
				return fmt.Errorf("cannot index '%s' in array", s)
			} else if idx < 0 || idx >= len(currentType) {
				return fmt.Errorf("cannot set '%s' in array: out of range", s)
			}
			current = currentType[idx]
		default:
			return fmt.Errorf("cannot lookup property or index '%s' in %T", s, currentType)
		}
	}

	// then apply the change at the final path element
	key := path[len(path)-1]
	switch currentType := current.(type) {
	case map[string]interface{}:
		if isDelete {
			delete(currentType, key)
		} else {
			currentType[key] = value
		}
	case []interface{}:
		// Slices need the extra parentMap bookkeeping: append and delete produce a new slice header
		// which must be stored back into the map that owns the slice.
		idx, err := strconv.Atoi(key)
		if err != nil {
			return fmt.Errorf("cannot index '%s' in array", key)
		} else if idx < -1 || idx >= len(currentType) {
			return fmt.Errorf("cannot set '%s' in array: out of range", key)
		} else if isDelete {
			if idx == -1 {
				return fmt.Errorf("cannot delete '%s' in array", key)
			} else if parentMap == nil {
				return fmt.Errorf("override in nested arrays is not supported")
			} else {
				parentMap[parentKey] = slices.Delete(currentType, idx, idx+1)
			}
		} else if idx == -1 {
			if parentMap == nil {
				return fmt.Errorf("override in nested arrays is not supported")
			}
			parentMap[parentKey] = append(currentType, value)
		} else {
			currentType[idx] = value
		}
	default:
		return fmt.Errorf("cannot lookup property or index '%s' in %T", key, currentType)
	}

	return nil
}
delete", + Spec: `{"a":{"b":[{"c":"hello"}]}}`, + Path: []string{"a", "b", "0", "c"}, + Delete: true, + Expected: `{"a":{"b":[{}]}}`, + }, + { + Name: "simple array set", + Spec: `{"a":[{}]}`, + Path: []string{"a", "0"}, + Value: "hello", + Expected: `{"a":["hello"]}`, + }, + { + Name: "simple array append", + Spec: `{"a":["hello"]}`, + Path: []string{"a", "-1"}, + Value: "world", + Expected: `{"a":["hello","world"]}`, + }, + { + Name: "simple array delete", + Spec: `{"a":["hello", "world"]}`, + Path: []string{"a", "0"}, + Delete: true, + Expected: `{"a":["world"]}`, + }, + { + Name: "build object via path", + Spec: `{}`, + Path: []string{"a", "b"}, + Value: "hello", + Expected: `{"a":{"b":"hello"}}`, + }, + { + Name: "bad index str", + Spec: `{"a":[]}`, + Path: []string{"a", "b"}, + Value: "hello", + ExpectedError: fmt.Errorf("cannot index 'b' in array"), + }, + { + Name: "index out of range", + Spec: `{"a": [0]}`, + Path: []string{"a", "2"}, + Value: "hello", + ExpectedError: fmt.Errorf("cannot set '2' in array: out of range"), + }, + { + Name: "no append nested arrays", + Spec: `{"a":[[0]]}`, + Path: []string{"a", "0", "-1"}, + Value: "hello", + ExpectedError: fmt.Errorf("override in nested arrays is not supported"), + }, + } { + t.Run(tc.Name, func(t *testing.T) { + var inSpec map[string]interface{} + assert.NoError(t, json.Unmarshal([]byte(tc.Spec), &inSpec)) + err := writePathInStruct(inSpec, tc.Path, tc.Delete, tc.Value) + if tc.ExpectedError != nil { + assert.EqualError(t, err, tc.ExpectedError.Error()) + } else { + if assert.NoError(t, err) { + outSpec, _ := json.Marshal(inSpec) + assert.JSONEq(t, tc.Expected, string(outSpec)) + } + } + }) + } +} diff --git a/internal/command/root_test.go b/internal/command/root_test.go index 9d226d3..01cec4d 100644 --- a/internal/command/root_test.go +++ b/internal/command/root_test.go @@ -20,6 +20,7 @@ Usage: Available Commands: completion Generate the autocompletion script for the specified shell + generate Convert one 
or more Score files into a Docker compose manifest help Help about any command init Initialise a new score-compose project with local state directory and score file run Translate the SCORE file to docker-compose configuration