diff --git a/.golangci.yml b/.golangci.yml index 26f47185..bbdaa850 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -8,7 +8,7 @@ linters-settings: maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 min-occurrences: 2 @@ -18,4 +18,5 @@ linters: disable: - maligned - lll - + - gochecknoinits + - gochecknoglobals diff --git a/Makefile b/Makefile index d7df6f80..7cc22501 100644 --- a/Makefile +++ b/Makefile @@ -79,7 +79,7 @@ clean: .PHONY: test ## Setup, run all tests and clean -test: setup runtests clean +test: clean setup runtests clean .PHONY: runtests runtests: @@ -89,3 +89,8 @@ runtests: ## Run gofmt on the cmd and pkg packages gofmt: @gofmt -s -w ./cmd ./pkg + +.PHONY: check +## Runs static code analysis checks (golangci-lint) +check: gofmt + @golangci-lint run --max-same-issues 0 --verbose diff --git a/cmd/datamon/cmd/bundle.go b/cmd/datamon/cmd/bundle.go index fe908cd2..2e9517e9 100644 --- a/cmd/datamon/cmd/bundle.go +++ b/cmd/datamon/cmd/bundle.go @@ -18,20 +18,23 @@ Every bundle is an entry in the history of a repository at a point in time. 
} var bundleOptions struct { - Id string + ID string DataPath string } +var bundleID = "bundle" +var destination = "destination" + func init() { rootCmd.AddCommand(bundleCmd) } -func addBundleFlag(cmd *cobra.Command) error { - cmd.Flags().StringVarP(&bundleOptions.Id, "hash", "i", "", "The hash id for the bundle") - return cmd.MarkFlagRequired("hash") +func addBundleFlag(cmd *cobra.Command) string { + cmd.Flags().StringVarP(&bundleOptions.ID, bundleID, "i", "", "The hash id for the bundle") + return bundleID } -func addDataPathFlag(cmd *cobra.Command) error { - cmd.Flags().StringVarP(&bundleOptions.DataPath, "destination", "d", "", "The path to the download folder") - return cmd.MarkFlagRequired("destination") +func addDataPathFlag(cmd *cobra.Command) string { + cmd.Flags().StringVarP(&bundleOptions.DataPath, destination, "d", "", "The path to the download folder") + return destination } diff --git a/cmd/datamon/cmd/bundle_download.go b/cmd/datamon/cmd/bundle_download.go index d82b17de..cb5bf83f 100644 --- a/cmd/datamon/cmd/bundle_download.go +++ b/cmd/datamon/cmd/bundle_download.go @@ -4,12 +4,13 @@ package cmd import ( "context" + "log" + "github.com/oneconcern/datamon/pkg/core" "github.com/oneconcern/datamon/pkg/storage/gcs" "github.com/oneconcern/datamon/pkg/storage/localfs" "github.com/spf13/afero" "github.com/spf13/cobra" - "log" ) // downloadBundleCmd is the command to download a specific bundle from Datamon and model it locally. 
The primary purpose @@ -24,7 +25,7 @@ var downloadBundleCmd = &cobra.Command{ sourceStore := gcs.New(repoParams.Bucket) destinationSource := localfs.New(afero.NewBasePathFs(afero.NewOsFs(), bundleOptions.DataPath)) - archiveBundle, err := core.NewArchiveBundle(repoParams.RepoName, bundleOptions.Id, sourceStore) + archiveBundle, err := core.NewArchiveBundle(repoParams.RepoName, bundleOptions.ID, sourceStore) if err != nil { log.Fatalln(err) } @@ -36,15 +37,23 @@ var downloadBundleCmd = &cobra.Command{ } func init() { + // Source - addBucketNameFlag(downloadBundleCmd) - addRepoNameOptionFlag(downloadBundleCmd) + requiredFlags := []string{addBucketNameFlag(downloadBundleCmd)} + requiredFlags = append(requiredFlags, addRepoNameOptionFlag(downloadBundleCmd)) // Bundle to download - addBundleFlag(downloadBundleCmd) + requiredFlags = append(requiredFlags, addBundleFlag(downloadBundleCmd)) // Destination - addDataPathFlag(downloadBundleCmd) + requiredFlags = append(requiredFlags, addDataPathFlag(downloadBundleCmd)) + + for _, flag := range requiredFlags { + err := downloadBundleCmd.MarkFlagRequired(flag) + if err != nil { + log.Fatalln(err) + } + } bundleCmd.AddCommand(downloadBundleCmd) } diff --git a/cmd/datamon/cmd/completion.go b/cmd/datamon/cmd/completion.go index d53c5978..28f2c206 100644 --- a/cmd/datamon/cmd/completion.go +++ b/cmd/datamon/cmd/completion.go @@ -9,22 +9,25 @@ import ( "github.com/spf13/cobra" ) +const bash = "bash" +const zsh = "zsh" + // completionCmd represents the completion command var completionCmd = &cobra.Command{ Use: "completion SHELL", - Short: "generate completions for the tpt command", + Short: "generate completions for the datamon command", Long: `Generate completions for your shell For bash add the following line to your ~/.bashrc - eval "$(tpt completion bash)" + eval "$(datamon completion bash)" For zsh add generate a file: - tpt completion zsh > /usr/local/share/zsh/site-functions/_tpt + datamon completion zsh > 
/usr/local/share/zsh/site-functions/_datamon `, - ValidArgs: []string{"bash", "zsh"}, + ValidArgs: []string{bash, zsh}, Args: cobra.OnlyValidArgs, Run: func(cmd *cobra.Command, args []string) { @@ -34,16 +37,20 @@ var completionCmd = &cobra.Command{ os.Exit(1) } shell := args[0] - if shell != "bash" && shell != "zsh" { + if shell != bash && shell != zsh { // #nosec fmt.Fprintln(os.Stderr, "the only supported shells are bash and zsh") } - if shell == "bash" { - rootCmd.GenBashCompletion(os.Stdout) + if shell == bash { + if err := rootCmd.GenBashCompletion(os.Stdout); err != nil { + fmt.Fprintln(os.Stderr, "failed to generate bash completion:", err) + } } - if shell == "zsh" { - rootCmd.GenZshCompletion(os.Stdout) + if shell == zsh { + if err := rootCmd.GenZshCompletion(os.Stdout); err != nil { + fmt.Fprintln(os.Stderr, "failed to generate zsh completion:", err) + } } }, } diff --git a/cmd/datamon/cmd/fschecks.go b/cmd/datamon/cmd/fschecks.go index b9431531..297158ac 100644 --- a/cmd/datamon/cmd/fschecks.go +++ b/cmd/datamon/cmd/fschecks.go @@ -7,6 +7,7 @@ import ( "os" ) +// DieIfNotAccessible exits the process if the path is not accessible. 
func DieIfNotAccessible(path string) { _, err := os.Stat(path) if err != nil { diff --git a/cmd/datamon/cmd/model_deploy.go b/cmd/datamon/cmd/model_deploy.go index f0ca36f2..83442f1c 100644 --- a/cmd/datamon/cmd/model_deploy.go +++ b/cmd/datamon/cmd/model_deploy.go @@ -3,12 +3,14 @@ package cmd import ( "bytes" "context" + "github.com/aws/aws-sdk-go/aws" "github.com/oneconcern/datamon/pkg/config" "github.com/oneconcern/datamon/pkg/kubeless" "github.com/oneconcern/datamon/pkg/storage/sthree" "github.com/spf13/cobra" "github.com/spf13/viper" + "io/ioutil" "log" "os" @@ -36,7 +38,11 @@ var deployCmd = &cobra.Command{ log.Printf("Error while reading config file: %s", err) } - viper.ReadConfig(bytes.NewBuffer(configFileBytes)) + err = viper.ReadConfig(bytes.NewBuffer(configFileBytes)) + if err != nil { + log.Fatalln(err) + } + processor := config.Processor{} err = viper.Unmarshal(&processor) diff --git a/cmd/datamon/cmd/repo.go b/cmd/datamon/cmd/repo.go index e1cfb7ef..f9a78b9c 100644 --- a/cmd/datamon/cmd/repo.go +++ b/cmd/datamon/cmd/repo.go @@ -3,25 +3,23 @@ package cmd import ( - "github.com/oneconcern/pipelines/pkg/log" "github.com/spf13/cobra" ) -var logger log.Factory - var repoParams struct { Bucket string RepoName string } -func addRepoNameOptionFlag(cmd *cobra.Command) error { - flags := cmd.Flags() - flags.StringVarP(&repoParams.RepoName, "name", "n", "", "The name of this repository") - return cmd.MarkFlagRequired("name") +var name = "name" +var bucket = "bucket" + +func addRepoNameOptionFlag(cmd *cobra.Command) string { + cmd.Flags().StringVarP(&repoParams.RepoName, name, "n", "", "The name of this repository") + return name } -func addBucketNameFlag(cmd *cobra.Command) error { - flags := cmd.Flags() - flags.StringVarP(&repoParams.RepoName, "bucket", "b", "", "The name of the bucket used by datamon") - return cmd.MarkFlagRequired("bucket") +func addBucketNameFlag(cmd *cobra.Command) string { + cmd.Flags().StringVarP(&repoParams.Bucket, bucket, "b", "", 
"The name of the bucket used by datamon") + return bucket } diff --git a/cmd/datamon/cmd/root.go b/cmd/datamon/cmd/root.go index 6ad22f99..0be2e2cf 100644 --- a/cmd/datamon/cmd/root.go +++ b/cmd/datamon/cmd/root.go @@ -4,19 +4,10 @@ package cmd import ( "fmt" - "io" "log" "os" - "github.com/json-iterator/go" "github.com/spf13/cobra" - "github.com/spf13/viper" - yaml "gopkg.in/yaml.v2" -) - -var ( - cfgFile string - format string ) // rootCmd represents the base command when called without any subcommands @@ -45,123 +36,3 @@ func Execute() { func init() { log.SetFlags(0) } - -// initConfig reads in config file and ENV variables if set. -func initConfig() { - if os.Getenv("DATAMON_CONFIG") != "" { - // Use config file from the flag. - viper.SetConfigFile(os.Getenv("DATAMON_CONFIG")) - } else { - viper.AddConfigPath(".") - viper.AddConfigPath("$HOME/.datamon") - viper.AddConfigPath("/etc/datamon") - viper.SetConfigName(".datamon") - } - - viper.AutomaticEnv() // read in environment variables that match - // If a config file is found, read it in. - if err := viper.ReadInConfig(); err == nil { - log.Println("Using config file:", viper.ConfigFileUsed()) - } -} - -func print(data interface{}) error { - return formatters[format].Format(os.Stdout, data) -} - -func printe(data interface{}) error { - return formatters[format].Format(os.Stderr, data) -} - -// A Formatter is used to render output. They take an interface and output a byte array -// -// This byte array should be suitable for writing to a stream directly. 
-type Formatter interface { - Format(io.Writer, interface{}) error -} - -// FormatterFunc provides a way to use functions as a formatter interface -type FormatterFunc func(io.Writer, interface{}) error - -// Format the data with the function -func (f FormatterFunc) Format(w io.Writer, data interface{}) error { - return f(w, data) -} - -// JSONFormatter for printing as pretified json -func JSONFormatter() FormatterFunc { - return func(w io.Writer, data interface{}) error { - enc := jsoniter.NewEncoder(w) - enc.SetIndent("", " ") - return enc.Encode(data) - } -} - -// CompactJSONFormatter for priting as compact json -func CompactJSONFormatter() FormatterFunc { - return func(w io.Writer, data interface{}) error { - return jsoniter.NewEncoder(w).Encode(data) - } -} - -// YAMLFormatter for printing as yaml -func YAMLFormatter() FormatterFunc { - return func(w io.Writer, data interface{}) error { - enc := yaml.NewEncoder(w) - defer enc.Close() - if err := enc.Encode(data); err != nil { - return err - } - return enc.Close() - } -} - -var formatters map[string]Formatter - -func initDefaultFormatters() { - if formatters == nil { - formatters = make(map[string]Formatter) - formatters["json"] = JSONFormatter() - formatters["compactjson"] = CompactJSONFormatter() - formatters["yaml"] = YAMLFormatter() - } -} - -func knownFormatters() []string { - res := make([]string, len(formatters)) - var i int - for k := range formatters { - res[i] = k - i++ - } - return res -} - -func addFormatFlag(cmd *cobra.Command, defaultValue string, extraFormatters ...map[string]Formatter) error { - initDefaultFormatters() - if defaultValue == "" { - defaultValue = "yaml" - } - cmd.Flags().StringVarP(&format, "output", "o", "", "the output format to use") - prevPreRunE := cmd.PreRunE - cmd.PreRunE = func(cmd *cobra.Command, args []string) error { - if format == "" { - format = defaultValue - } - for _, ef := range extraFormatters { - for k, v := range ef { - formatters[k] = v - } - } - if prevPreRunE 
!= nil { - if err := prevPreRunE(cmd, args); err != nil { - return err - } - } - if _, ok := formatters[format]; !ok { - return fmt.Errorf("%q is not a known output format, use one of: %v", format, knownFormatters()) - } - return nil - } - return nil -} diff --git a/pkg/cafs/fake_data_generator.go b/pkg/cafs/fake_data_generator.go index fba2de9b..0d8fe930 100644 --- a/pkg/cafs/fake_data_generator.go +++ b/pkg/cafs/fake_data_generator.go @@ -2,7 +2,9 @@ package cafs import ( "context" + "github.com/oneconcern/datamon/internal" + "io/ioutil" "os" "path/filepath" @@ -16,7 +18,7 @@ func GenerateFile(tgt string, size int, leafSize uint32) error { defer f.Close() if size <= int(leafSize) { // small single chunk file - _, err := f.WriteString(internal.RandStringBytesMaskImprSrc(int(size))) + _, err := f.WriteString(internal.RandStringBytesMaskImprSrc(size)) if err != nil { return err } @@ -33,7 +35,7 @@ func GenerateFile(tgt string, size int, leafSize uint32) error { } remaining := size - (parts * int(leafSize)) if remaining > 0 { - _, err := f.WriteString(internal.RandStringBytesMaskImprSrc(int(remaining))) + _, err := f.WriteString(internal.RandStringBytesMaskImprSrc(remaining)) if err != nil { return err } diff --git a/pkg/config/Processor.go b/pkg/config/Processor.go index cf0aa8ad..b1bdc977 100644 --- a/pkg/config/Processor.go +++ b/pkg/config/Processor.go @@ -1,20 +1,23 @@ package config +// Spec for what to process type Processor struct { - Name string `json: "name" yaml: "name"` - Branch string `json:"branch" yaml: "branch"` - Runtime string `json:"runtime" yaml: "runtime"` - Resources Resources `json:"resources" yaml: "resources"` - Content []string `json:"content" yaml: "content"` - Command []string `json:"command" yaml: "command"` + Name string `json:"name" yaml:"name"` + Branch string `json:"branch" yaml:"branch"` + Runtime string `json:"runtime" yaml:"runtime"` + Resources Resources `json:"resources" yaml:"resources"` + Content []string `json:"content" 
yaml:"content"` + Command []string `json:"command" yaml:"command"` } +// Resources for the processor type Resources struct { - Cpu ResourceLimit `json:"cpu,omitempty" yaml: "cpu,omitempty"` - Mem ResourceLimit `json:"mem,omitempty" yaml: "mem,omitempty"` + CPU ResourceLimit `json:"cpu,omitempty" yaml:"cpu,omitempty"` + Mem ResourceLimit `json:"mem,omitempty" yaml:"mem,omitempty"` } +// ResourceLimit describes the min/max bounds for a resource type ResourceLimit struct { - Min string `json:"min,omitempty" yaml: "min,omitempty"` - Max string `json:"max,omitempty" yaml: "max,omitempty"` + Min string `json:"min,omitempty" yaml:"min,omitempty"` + Max string `json:"max,omitempty" yaml:"max,omitempty"` } diff --git a/pkg/core/bundle.go b/pkg/core/bundle.go index 70a1537a..cb605002 100644 --- a/pkg/core/bundle.go +++ b/pkg/core/bundle.go @@ -3,14 +3,15 @@ package core import ( "context" + "github.com/oneconcern/datamon/pkg/model" "github.com/oneconcern/datamon/pkg/storage" ) // Represents the bundle in it's archive state type ArchiveBundle struct { - repoId string - bundleId string + repoID string + bundleID string store storage.Store bundleDescriptor model.Bundle bundleEntries []model.BundleEntry @@ -40,8 +41,8 @@ func Publish(ctx context.Context, archiveBundle *ArchiveBundle, consumableBundle func NewArchiveBundle(repo string, bundle string, store storage.Store) (*ArchiveBundle, error) { return &ArchiveBundle{ - repoId: repo, - bundleId: bundle, + repoID: repo, + bundleID: bundle, store: store, }, nil } diff --git a/pkg/core/bundle_archive.go b/pkg/core/bundle_archive.go index 629f4556..6479b002 100644 --- a/pkg/core/bundle_archive.go +++ b/pkg/core/bundle_archive.go @@ -4,6 +4,7 @@ package core import ( "context" + "github.com/oneconcern/datamon/pkg/cafs" "github.com/oneconcern/datamon/pkg/model" "github.com/oneconcern/datamon/pkg/storage" @@ -13,8 +14,8 @@ import ( func unpackBundleDescriptor(ctx context.Context, archiveBundle *ArchiveBundle, consumableBundle ConsumableBundle) error { bundleDescriptorBuffer, 
err := storage.ReadTee(ctx, - archiveBundle.store, model.GetArchivePathToBundle(archiveBundle.repoId, archiveBundle.bundleId), - consumableBundle.Store, model.GetConsumablePathToBundle(archiveBundle.bundleId)) + archiveBundle.store, model.GetArchivePathToBundle(archiveBundle.repoID, archiveBundle.bundleID), + consumableBundle.Store, model.GetConsumablePathToBundle(archiveBundle.bundleID)) if err != nil { return err } @@ -32,8 +33,8 @@ func unpackBundleFileList(ctx context.Context, archiveBundle *ArchiveBundle, con var i int64 for i = 0; i < archiveBundle.bundleDescriptor.EntryFilesCount; i++ { bundleEntriesBuffer, err := storage.ReadTee(ctx, - archiveBundle.store, model.GetArchivePathToBundleFileList(archiveBundle.repoId, archiveBundle.bundleId, i), - consumableBundle.Store, model.GetConsumablePathToBundleFileList(archiveBundle.bundleId, i)) + archiveBundle.store, model.GetArchivePathToBundleFileList(archiveBundle.repoID, archiveBundle.bundleID, i), + consumableBundle.Store, model.GetConsumablePathToBundleFileList(archiveBundle.bundleID, i)) if err != nil { return err } @@ -63,6 +64,9 @@ func unpackDataFiles(ctx context.Context, archiveBundle *ArchiveBundle, consumab return err } reader, err := fs.Get(ctx, key) + if err != nil { + return err + } err = consumableBundle.Store.Put(ctx, bundleEntry.NameWithPath, reader) if err != nil { return err diff --git a/pkg/core/bundle_test.go b/pkg/core/bundle_test.go index d2c8405c..615cc410 100644 --- a/pkg/core/bundle_test.go +++ b/pkg/core/bundle_test.go @@ -3,6 +3,7 @@ package core_test import ( "bytes" "context" + "github.com/oneconcern/datamon/pkg/cafs" "github.com/oneconcern/datamon/pkg/core" "github.com/oneconcern/datamon/pkg/model" @@ -11,7 +12,9 @@ import ( "github.com/segmentio/ksuid" "github.com/spf13/afero" "github.com/stretchr/testify/require" + "gopkg.in/yaml.v2" + "io/ioutil" "os" "reflect" @@ -24,7 +27,7 @@ const ( entryFilesCount = 2 dataFilesCount = 4 repo = "bundle_test_repo" - bundleId = "bundle123" + 
bundleID = "bundle123" testRoot = "../../testdata/core" sourceDir = "../../testdata/core/bundle/source/" destinationDir = "../../testdata/core/bundle/destination/" @@ -40,7 +43,7 @@ func TestDownloadBundle(t *testing.T) { require.NoError(t, setup(t)) destinationStore := localfs.New(afero.NewBasePathFs(afero.NewOsFs(), destinationDir)) sourceStore := localfs.New(afero.NewBasePathFs(afero.NewOsFs(), sourceDir)) - archiveBundle, err := core.NewArchiveBundle(repo, bundleId, sourceStore) + archiveBundle, err := core.NewArchiveBundle(repo, bundleID, sourceStore) require.NoError(t, err) require.NoError(t, core.Publish(context.Background(), archiveBundle, core.ConsumableBundle{Store: destinationStore})) @@ -50,7 +53,7 @@ func TestDownloadBundle(t *testing.T) { func validatePublish(t *testing.T, store storage.Store) error { // Check Bundle File - reader, err := store.Get(context.Background(), model.GetConsumablePathToBundle(bundleId)) + reader, err := store.Get(context.Background(), model.GetConsumablePathToBundle(bundleID)) require.NoError(t, err) bundleDescriptorBuffer, err := ioutil.ReadAll(reader) @@ -76,7 +79,7 @@ func getTimeStamp() *time.Time { return timeStamp } -func generateDataFile(test *testing.T, store storage.Store) (model.BundleEntry, error) { +func generateDataFile(test *testing.T, store storage.Store) model.BundleEntry { // Generate data files to compare post publish, write to internal folder ksuid, err := ksuid.NewRandom() require.NoError(test, err) @@ -87,6 +90,7 @@ func generateDataFile(test *testing.T, store storage.Store) (model.BundleEntry, cafs.LeafSize(leafSize), cafs.Backend(store), ) + require.NoError(test, err) keys, err := cafs.GenerateCAFSChunks(internalDir+ksuid.String(), fs) require.NoError(test, err) // return the Bundle Entry @@ -95,7 +99,7 @@ func generateDataFile(test *testing.T, store storage.Store) (model.BundleEntry, NameWithPath: dataDir + ksuid.String(), FileMode: 0700, Size: uint(size), - }, nil + } } func setup(t *testing.T) error 
{ @@ -108,20 +112,18 @@ func setup(t *testing.T) error { for i = 0; i < entryFilesCount; i++ { - bundleEntry, err := generateDataFile(t, blobStore) - require.NoError(t, err) + bundleEntry := generateDataFile(t, blobStore) bundleFileList := model.BundleEntries{BundleEntries: []model.BundleEntry{bundleEntry}} for j = 0; j < (dataFilesCount - 1); j++ { - bundleEntry, err = generateDataFile(t, blobStore) - require.NoError(t, err) + bundleEntry = generateDataFile(t, blobStore) bundleFileList.BundleEntries = append(bundleFileList.BundleEntries, bundleEntry) } buffer, err := yaml.Marshal(bundleFileList) require.NoError(t, err) - destinationPath := model.GetArchivePathToBundleFileList(repo, bundleId, i) + destinationPath := model.GetArchivePathToBundleFileList(repo, bundleID, i) require.NoError(t, sourceStore.Put(context.Background(), destinationPath, bytes.NewReader(buffer))) } @@ -132,13 +134,13 @@ func setup(t *testing.T) error { require.NoError(t, err) require.NoError(t, os.MkdirAll(destinationDir, 0700)) - return sourceStore.Put(context.Background(), model.GetArchivePathToBundle(repo, bundleId), bytes.NewReader(buffer)) + return sourceStore.Put(context.Background(), model.GetArchivePathToBundle(repo, bundleID), bytes.NewReader(buffer)) } func generateBundleDescriptor() model.Bundle { // Generate Bundle return model.Bundle{ - ID: bundleId, + ID: bundleID, LeafSize: leafSize, Message: "test bundle", Timestamp: *getTimeStamp(), diff --git a/pkg/fingerprint/fingerprint.go b/pkg/fingerprint/fingerprint.go index 15478f9c..097e7911 100644 --- a/pkg/fingerprint/fingerprint.go +++ b/pkg/fingerprint/fingerprint.go @@ -84,9 +84,9 @@ func (m *Maker) Process(path string) (digest []byte, err error) { go func() { for part, totalSize := 0, int64(0); ; part++ { partBuffer := make([]byte, m.leafSize) - n, err := r.Read(partBuffer) - if err != nil { - if err == io.EOF { + n, e := r.Read(partBuffer) + if e != nil { + if e == io.EOF { break } return @@ -125,7 +125,7 @@ func (m *Maker) 
Process(path string) (digest []byte, err error) { b := make([]byte, len(digestHash)*sz) for index, val := range digestHash { offset := sz * index - copy(b[offset:offset+sz], val[:]) + copy(b[offset:offset+sz], val) } rootBlake, err := blake2b.New(&blake2b.Config{ @@ -177,7 +177,7 @@ func (m *Maker) processChunk(rx <-chan chunkInput, tx chan<- chunkOutput) { Tree: &blake2b.Tree{ Fanout: 0, MaxDepth: 2, - LeafSize: uint32(c.leafSize), + LeafSize: c.leafSize, NodeOffset: uint64(c.part), NodeDepth: 0, InnerHashSize: m.size, diff --git a/pkg/kubeless/file_op.go b/pkg/kubeless/file_op.go index f85640e6..6ad1aeec 100644 --- a/pkg/kubeless/file_op.go +++ b/pkg/kubeless/file_op.go @@ -3,7 +3,9 @@ package kubeless import ( "archive/zip" "fmt" + "github.com/bmatcuk/doublestar" + "io" "io/ioutil" "log" diff --git a/pkg/model/bundle.go b/pkg/model/bundle.go index 2027c0d7..a347ca4f 100644 --- a/pkg/model/bundle.go +++ b/pkg/model/bundle.go @@ -15,13 +15,13 @@ type Bundle struct { Timestamp time.Time `json:"timestamp,omitempty" yaml:"timestamp,omitempty"` Committers []Contributor `json:"committers" yaml:"committers"` EntryFilesCount int64 `json:"entryfilescount" yaml:"entryfilescount"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // List of all files part of a bundle. 
type BundleEntries struct { BundleEntries []BundleEntry `json:"BundleEntries" yaml:"BundleEntries"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // List of files, directories (empty) skipped @@ -30,14 +30,14 @@ type BundleEntry struct { NameWithPath string `json:"name" yaml:"name"` FileMode os.FileMode `json:"mode" yaml:"mode"` Size uint `json:"size" yaml:"size"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // Contributor who created the object type Contributor struct { - Name string `json:"name" yaml:"name"` - Email string `json:"email" yaml:"email"` - _ struct{} `json:"-" yaml:"-"` + Name string `json:"name" yaml:"name"` + Email string `json:"email" yaml:"email"` + _ struct{} } func (c *Contributor) String() string { @@ -50,21 +50,21 @@ func (c *Contributor) String() string { return fmt.Sprintf("%s <%s>", c.Name, c.Email) } -func GetConsumablePathToBundle(bundleId string) string { - return fmt.Sprint("./.datamon/", bundleId, ".json") +func GetConsumablePathToBundle(bundleID string) string { + return fmt.Sprint("./.datamon/", bundleID, ".json") } -func GetConsumablePathToBundleFileList(bundleId string, index int64) string { - return fmt.Sprint("./.datamon/", bundleId, "-bundle-files-", index, ".json") +func GetConsumablePathToBundleFileList(bundleID string, index int64) string { + return fmt.Sprint("./.datamon/", bundleID, "-bundle-files-", index, ".json") } -func GetArchivePathToBundle(repo string, bundleId string) string { - return fmt.Sprint(repo, "-bundles/", bundleId, "/bundle.json") +func GetArchivePathToBundle(repo string, bundleID string) string { + return fmt.Sprint(repo, "-bundles/", bundleID, "/bundle.json") } -func GetArchivePathToBundleFileList(repo string, bundleId string, index int64) string { +func GetArchivePathToBundleFileList(repo string, bundleID string, index int64) string { // -bundles//bundlefiles-.json - return fmt.Sprint(repo, "-bundles/", bundleId, "/bundle-files-", index, ".json") + return fmt.Sprint(repo, "-bundles/", bundleID, 
"/bundle-files-", index, ".json") } func GetArchivePathBlobPrefix() string { diff --git a/pkg/model/entry.go b/pkg/model/entry.go index 435e6e3c..568b4f63 100644 --- a/pkg/model/entry.go +++ b/pkg/model/entry.go @@ -14,7 +14,7 @@ type Entry struct { Hash string `json:"hash" yaml:"hash"` Mtime time.Time `json:"mtime" yaml:"mtime"` Mode FileMode `json:"mode" yaml:"mode"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // Entries represent a collectin of entries @@ -42,7 +42,7 @@ func (entries Entries) Hash() (string, error) { // Iterate over hashes of all underlying nodes for _, leave := range entries { //#nosec - hasher.Write(UnsafeStringToBytes(leave.Hash)) + _, _ = hasher.Write(UnsafeStringToBytes(leave.Hash)) } return hex.EncodeToString(hasher.Sum(nil)), nil diff --git a/pkg/model/snapshot.go b/pkg/model/snapshot.go index f7733403..8a8de573 100644 --- a/pkg/model/snapshot.go +++ b/pkg/model/snapshot.go @@ -12,7 +12,7 @@ type Snapshot struct { PreviousCommits []string `json:"previous_commits,omitempty" yaml:"previous_commits,omitempty"` Entries Entries `json:"entries,omitempty" yaml:"entries,omitempty"` Timestamp time.Time `json:"timestamp,omitempty" yaml:"timestamp,omitempty"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // Snapshots represents a collection of snapshots diff --git a/pkg/model/types.go b/pkg/model/types.go index f4ef370a..feccbe2e 100644 --- a/pkg/model/types.go +++ b/pkg/model/types.go @@ -2,6 +2,7 @@ package model import ( "github.com/json-iterator/go" + "os" "strconv" ) @@ -12,7 +13,7 @@ type Repo struct { Description string `json:"description,omitempty" yaml:"description,omitempty"` TagsRef map[string]string `json:"tags,omitempty" yaml:"tags,omitempty"` BranchRef map[string]string `json:"branches,omitempty" yaml:"branches,omitempty"` - _ struct{} `json:"-" yaml:"-"` + _ struct{} } // ChangeSet captures the data for a change set in a bundle diff --git a/pkg/model/unsafe.go b/pkg/model/unsafe.go index 09cbc8a7..4703ac22 100644 --- 
a/pkg/model/unsafe.go +++ b/pkg/model/unsafe.go @@ -14,12 +14,6 @@ func UnsafeStringToBytes(s string) []byte { return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{ Len: ln, Cap: ln, - Data: (*(*reflect.StringHeader)(unsafe.Pointer(&s))).Data, + Data: (*reflect.StringHeader)(unsafe.Pointer(&s)).Data, })) } - -// UnsafeBytesToString converts []byte to string without a memcopy -func UnsafeBytesToString(b []byte) string { - /* #nosec */ - return *(*string)(unsafe.Pointer(&reflect.StringHeader{Data: uintptr(unsafe.Pointer(&b[0])), Len: len(b)})) -} diff --git a/pkg/storage/gcs/store.go b/pkg/storage/gcs/store.go index e19e6b1c..0a9db95a 100644 --- a/pkg/storage/gcs/store.go +++ b/pkg/storage/gcs/store.go @@ -4,10 +4,13 @@ package gcs import ( gcsStorage "cloud.google.com/go/storage" + "context" "errors" + "github.com/oneconcern/datamon/pkg/storage" "google.golang.org/api/option" + "io" ) diff --git a/pkg/storage/localfs/store.go b/pkg/storage/localfs/store.go index 7a843d87..46a2f50c 100644 --- a/pkg/storage/localfs/store.go +++ b/pkg/storage/localfs/store.go @@ -51,8 +51,6 @@ func (l *localFS) Put(ctx context.Context, key string, source io.Reader) error { if err := l.fs.MkdirAll(filepath.Dir(key), 0700); err != nil { return fmt.Errorf("ensuring directories for %q: %v", key, err) } - } else { - dir = "." 
} target, err := l.fs.OpenFile(key, os.O_EXCL|os.O_CREATE|os.O_WRONLY|os.O_SYNC, 0600) if err != nil { diff --git a/pkg/storage/sthree/store_test.go b/pkg/storage/sthree/store_test.go index 11d61f2b..5e5b6287 100644 --- a/pkg/storage/sthree/store_test.go +++ b/pkg/storage/sthree/store_test.go @@ -117,15 +117,16 @@ func setupStore(t testing.TB) (storage.Store, func()) { runtime.Goexit() } cl := s3.New(sess) - cl.CreateBucket(&s3.CreateBucketInput{ + _, err = cl.CreateBucket(&s3.CreateBucketInput{ Bucket: bucket, CreateBucketConfiguration: &s3.CreateBucketConfiguration{ LocationConstraint: aws.String("us-west-2"), }, }) + require.NoError(t, err) cleanup := func() { - cl.DeleteBucket(&s3.DeleteBucketInput{ + _, _ = cl.DeleteBucket(&s3.DeleteBucketInput{ Bucket: bucket, }) }