diff --git a/.env b/.env new file mode 100644 index 0000000..f096ff7 --- /dev/null +++ b/.env @@ -0,0 +1,3 @@ +DATABASE=db.example.com +NULL= +MODE=debug diff --git a/.travis.yml b/.travis.yml index ef6d16c..2e77bb5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ language: go go: - - 1.4 + - 1.5 - tip sudo: false install: diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json new file mode 100644 index 0000000..11be38a --- /dev/null +++ b/Godeps/Godeps.json @@ -0,0 +1,28 @@ +{ + "ImportPath": "github.com/kreuzwerker/envplate", + "GoVersion": "go1.5.3", + "Deps": [ + { + "ImportPath": "github.com/davecgh/go-spew/spew", + "Rev": "65ca732a33a40c2a5b9e036f236f975e7e85cf6c" + }, + { + "ImportPath": "github.com/joho/godotenv", + "Comment": "v1-10-g4ed1339", + "Rev": "4ed13390c0acd2ff4e371e64d8b97c8954138243" + }, + { + "ImportPath": "github.com/pmezard/go-difflib/difflib", + "Rev": "792786c7400a136282c1664665ae0a8db921c6c2" + }, + { + "ImportPath": "github.com/stretchr/testify/assert", + "Comment": "v1.1.3-2-g9f9027f", + "Rev": "9f9027faeb0dad515336ed2f28317f9f8f527ab4" + }, + { + "ImportPath": "github.com/yawn/envmap", + "Rev": "45241142781edb43b57fefaf356a7526435ff719" + } + ] +} diff --git a/Godeps/Readme b/Godeps/Readme new file mode 100644 index 0000000..4cdaa53 --- /dev/null +++ b/Godeps/Readme @@ -0,0 +1,5 @@ +This directory tree is generated automatically by godep. + +Please do not edit. + +See https://github.com/tools/godep for more information. diff --git a/Makefile b/Makefile index 93e6244..bcb0be7 100644 --- a/Makefile +++ b/Makefile @@ -20,4 +20,4 @@ release: github-release upload --user $(USER) --repo $(REPO) --tag $(VERSION) -s $(TOKEN) --name ep-linux --file out/linux/ep test: - DATABASE=db.example.com NULL= MODE=debug go test -cover + go test -cover diff --git a/bin/envplate.go b/bin/envplate.go deleted file mode 100644 index 1d23b22..0000000 --- a/bin/envplate.go +++ /dev/null @@ -1,50 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "os" - "syscall" - - env "github.com/kreuzwerker/envplate" - "github.com/yawn/doubledash" -) - -var build string - -func main() { - - flagArgs, execArgs := doubledash.Args, doubledash.Xtra - os.Args = flagArgs - - var ( - help = flag.Bool("h", false, "display usage") - backup = flag.Bool("b", false, "create a backup file when using inline mode") - strict = flag.Bool("s", false, "strict-mode - fail when falling back on defaults") - dryRun = flag.Bool("d", false, "dry-run - output templates to stdout instead of inline replacement") - verbose = flag.Bool("v", false, "verbose logging") - ) - - flag.Parse() - - env.Config.Backup = *backup - env.Config.DryRun = *dryRun - env.Config.Strict = *strict - env.Config.Verbose = *verbose - - if *help { - fmt.Fprintf(os.Stderr, "Usage of %s (%s):\n", os.Args[0], build) - flag.PrintDefaults() - } else { - env.Apply(flag.Args()) - } - - if len(execArgs) > 0 { - - if err := syscall.Exec(execArgs[0], execArgs, os.Environ()); err != nil { - env.Log(env.ERROR, "Cannot exec '%v': %v", execArgs, err) - } - - } - -} diff --git a/bin/ep.go b/bin/ep.go new file mode 100644 index 0000000..6b63352 --- /dev/null +++ b/bin/ep.go @@ -0,0 +1,90 @@ +package main + +import ( + "fmt" + "log" + "os" + "syscall" + + "github.com/kreuzwerker/envplate" + "github.com/spf13/cobra" + "github.com/yawn/doubledash" +) + +var ( + build string + version string +) + +func main() { + + var ( // flags + backup *bool + dryRun *bool + strict *bool + verbose *bool + ) + + os.Args = doubledash.Args + + // commands + root := &cobra.Command{ 
+ Use: "ep", + Short: "envplate provides trivial templating for configuration files using environment keys", + PersistentPreRun: func(cmd *cobra.Command, args []string) { + envplate.Logger.Verbose = *verbose + }, + } + + parse := &cobra.Command{ + Use: "parse", + Short: "Parse globs and exec after doubledash", + Run: func(cmd *cobra.Command, args []string) { + + var h = envplate.Handler{ + Backup: *backup, + DryRun: *dryRun, + Strict: *strict, + } + + if err := h.Apply(args); err != nil { + os.Exit(1) + } + + if h.DryRun { + os.Exit(0) + } + + if len(doubledash.Xtra) > 0 { + + if err := syscall.Exec(doubledash.Xtra[0], doubledash.Xtra, os.Environ()); err != nil { + log.Fatalf("Cannot exec '%v': %v", doubledash.Xtra, err) + } + + } + + }, + } + + version := &cobra.Command{ + Use: "version", + Short: "Print the version information of ep", + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Envplate %s (%s)\n", version, build) + }, + } + + root.AddCommand(parse) + root.AddCommand(version) + + // flag parsing + backup = parse.Flags().BoolP("backup", "b", false, "Create a backup file when using inline mode") + dryRun = parse.Flags().BoolP("dry-run", "d", false, "Dry-run - output templates to stdout instead of inline replacement") + strict = parse.Flags().BoolP("strict", "s", false, "Strict-mode - fail when falling back on defaults") + verbose = parse.Flags().BoolP("verbose", "v", false, "Verbose logging") + + if err := root.Execute(); err != nil { + log.Fatalf("Failed to start the application: %v", err) + } + +} diff --git a/conf.go b/conf.go deleted file mode 100644 index 231509a..0000000 --- a/conf.go +++ /dev/null @@ -1,5 +0,0 @@ -package envplate - -var Config struct { - Backup, DryRun, Strict, Verbose bool -} diff --git a/envplate.go b/envplate.go index d43fd72..f83da3c 100644 --- a/envplate.go +++ b/envplate.go @@ -1,8 +1,6 @@ package envplate import ( - "fmt" - "io" "io/ioutil" "os" "path/filepath" @@ -12,14 +10,19 @@ import ( ) const ( - NoDefaultDefined = "" - NotAnEscapeSequence = "" - DefaultValueSyntax = ":-" + noDefaultDefined = "" + notAnEscapeSequence = "" ) +type Handler struct { + Backup bool + DryRun bool + Strict bool +} + var exp = regexp.MustCompile(`(\\*)\$\{(.+?)(?:(\:\-)(.*?))?\}`) -func Apply(globs []string) { +func (h *Handler) Apply(globs []string) error { matches := false @@ -28,7 +31,7 @@ func Apply(globs []string) { files, err := filepath.Glob(pattern) if err != nil { - Log(ERROR, err.Error()) + return err } for _, name := range files { @@ -39,8 +42,8 @@ func Apply(globs []string) { matches = true - if err := parse(name); err != nil { - Log(ERROR, "Error while parsing '%s': %v", name, err) + if err := h.parse(name); err != nil { + return Log(ERROR, "Error while parsing '%s': %v", name, err) } } @@ -48,67 +51,39 @@ func Apply(globs []string) { } if !matches { - Log(ERROR, "Zero files matched passed globs '%v'", globs) - } - -} - -func createBackup(file string) error { - - source, err := os.Open(file) - - if err != nil { - return err - } - - defer source.Close() - - target, err := os.Create(fmt.Sprintf("%s.bak", file)) - - if err != nil { - return err - } - - defer target.Close() - - if _, err := io.Copy(target, source); err != nil { - return err - } - - if err := os.Chmod(target.Name(), filemode(source.Name())); err != nil { - return err + return Log(ERROR, "Zero files matched passed globs '%v'", globs) } return nil } -func parse(file string) error { +func (h *Handler) parse(file string) error { env := envmap.Import() content, err := ioutil.ReadFile(file) 
if err != nil { - return fmt.Errorf("Cannot open %s: %v", file, err) + return Log(ERROR, "Cannot open %s: %v", file, err) } Log(DEBUG, "Parsing environment references in '%s'", file) + var errors []error + parsed := exp.ReplaceAllStringFunc(string(content), func(match string) string { var ( - esc, key, sep, def = capture(match) - value, keyDefined = env[key] + esc, key, sep, def = capture(match) + value, keyDefined = env[key] ) if len(esc)%2 == 1 { escaped := escape(match) - if escaped == NotAnEscapeSequence { - - Log(ERROR, "Tried to escape '%s', but was no escape sequence") - + if escaped == notAnEscapeSequence { + errors = append(errors, Log(ERROR, "Tried to escape '%s', but it is not an escape sequence", match)) } return escaped @@ -117,12 +92,12 @@ func parse(file string) error { if !keyDefined { - if sep == NoDefaultDefined { - Log(ERROR, "'%s' requires undeclared environment variable '%s', no default is given", file, key) + if sep == noDefaultDefined { + errors = append(errors, Log(ERROR, "'%s' requires undeclared environment variable '%s', no default is given", file, key)) } else { - if Config.Strict { - Log(ERROR, "'%s' requires undeclared environment variable '%s', but cannot use default '%s' (strict-mode)", file, key, def) + if h.Strict { + errors = append(errors, Log(ERROR, "'%s' requires undeclared environment variable '%s', but cannot use default '%s' (strict-mode)", file, key, def)) } else { Log(DEBUG, "'%s' requires undeclared environment variable '%s', using default '%s'", file, key, def) value = def @@ -142,11 +117,12 @@ func parse(file string) error { }) - if Config.DryRun { - Log(INFO, "Expanding all references in '%s' would look like this:\n%s", file, parsed) + if h.DryRun { + Log(INFO, "Expanding all references in '%s' without doing anything (dry-run)", file) + Log(RAW, parsed) } else { - if Config.Backup { + if h.Backup { Log(DEBUG, "Creating backup of '%s'", file) @@ -156,8 +132,20 @@ func parse(file string) error { } - return ioutil.WriteFile(file, []byte(parsed), filemode(file)) + mode, err := filemode(file) + + if err != nil { + return err + } + + if err := ioutil.WriteFile(file, []byte(parsed), mode); err != nil { + return err + } + + } + if len(errors) > 0 { + return errors[0] } return nil @@ -183,17 +171,13 @@ func escape(s string) (escaped string) { matches := expEscaped.FindStringSubmatch(s) if matches == nil { - - return NotAnEscapeSequence - + return notAnEscapeSequence } bss := matches[1] if len(bss)%2 != 1 { - - return NotAnEscapeSequence - + return notAnEscapeSequence } parsedBss := bss[:len(bss)-1][:(len(bss)-1)/2] @@ -205,15 +189,3 @@ func escape(s string) (escaped string) { return escaped } - -func filemode(file string) os.FileMode { - - fileinfo, err := os.Stat(file) - - if err != nil { - Log(ERROR, "Cannot stat '%s': %v", file, err) - } - - return fileinfo.Mode() - -} diff --git a/envplate_test.go b/envplate_test.go index d339703..c9fbd47 100644 --- a/envplate_test.go +++ b/envplate_test.go @@ -1,328 +1,239 @@ package envplate import ( - "bytes" "fmt" - "io/ioutil" - "log" - "os" "path/filepath" "testing" + _ "github.com/joho/godotenv/autoload" "github.com/stretchr/testify/assert" ) -func _delete(t *testing.T, name string) { - - if err := os.Remove(name); err != nil { - t.Fatalf("Error while deleting '%s': %v", name, err) - } - -} - -func _exists(name string) bool { - _, err := os.Stat(name) - return err == nil +func init() { + Logger.Verbose = true } -func _read(t *testing.T, name string) string { - - content, err := ioutil.ReadFile(name) - - if err 
!= nil { - t.Fatalf("Error while reading '%s': %v", name, err) - } - - return string(content) - -} - -func _template(t *testing.T) (string, string) { - - tpl := `Database1=${DATABASE} - Mode=${MODE} - Null=${NULL} - Database2=${DATABASE} - Database3=$NOT_A_VARIABLE - Database4=${ANOTHER_DATABASE:-db2.example.com} - Database5=${DATABASE:-db2.example.com}` +func TestApplyNoGlobs(t *testing.T) { - return _write(t, "parse.txt", tpl, 0644), tpl + assert := assert.New(t) + handler := &Handler{} -} + _tmpdir(t, func(tempdir string) { -func _template_defaults(t *testing.T) (string, string) { + globs := []string{ + tempdir, + filepath.Join(tempdir, "*.not-here"), + filepath.Join(tempdir, "*.not-there"), + } - tpl := `Double1=${DATABASE} Double2=${DATABASE} - Double3=${DATABASE:-db2.example.com} Double4=${DATABASE:-db2.example.com} - DoubleDefault1=${ANOTHER_DATABASE:-db2-example.com} DoubleDefault2=${ANOTHER_DATABASE:-db2-example.com} - EmptyDefault=${UNDEFINED_VARIABLE:-}` + _redirect_logs(func(logs func() string, raw func() string) { - return _write(t, "parse.txt", tpl, 0644), tpl + handler.Apply(globs) -} + msg := fmt.Sprintf("[ ERROR ] Zero files matched passed globs '[%s %s %s]'", + globs[0], + globs[1], + globs[2]) -func _template_escaping(t *testing.T) (string, string) { + assert.Contains(logs(), msg) - tpl := `${DATABASE} - Escaped1=\${DATABASE} EscapedDefault1=\${DATABASE:-db2.example.com} EscapedDefaultReplaced1=\${ANOTHER_DATABASE:-db2.example.com} - NoEscape1=\\${DATABASE} NoEscapeDefault1=\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced1=\\${ANOTHER_DATABASE:-db2.example.com} - Escaped2=\\\${DATABASE} EscapedDefault2=\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced2=\\\${ANOTHER_DATABASE:-db2.example.com} - NoEscape2=\\\\${DATABASE} NoEscapeDefault2=\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced2=\\\\${ANOTHER_DATABASE:-db2.example.com} - Escaped3=\\\\\${DATABASE} EscapedDefault3=\\\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced3=\\\\\${ANOTHER_DATABASE:-db2.example.com} - NoEscape3=\\\\\\${DATABASE} NoEscapeDefault3=\\\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced3=\\\\\\${ANOTHER_DATABASE:-db2.example.com}` + }) - return _write(t, "parse.txt", tpl, 0644), tpl + }) } -func _write(t *testing.T, name, content string, mode os.FileMode) string { +func TestCapture(t *testing.T) { - file, err := ioutil.TempFile("", name) + assert := assert.New(t) - if err != nil { - t.Fatalf("Error while opening '%s': %v", name, err) + var tt = []struct { + in, e, v, s, d string + }{ + {"${FOO}", "", "FOO", noDefaultDefined, noDefaultDefined}, + {"${FOO:-bar}", "", "FOO", ":-", "bar"}, + {"${FOO:-at the bar}", "", "FOO", ":-", "at the bar"}, + {"${FOO_3000:-near the bar}", "", "FOO_3000", ":-", "near the bar"}, + {"${FOO:--1}", "", "FOO", ":-", "-1"}, + {"${FOO:-http://www.example.com/bar/gar/war?a=b}", "", "FOO", ":-", "http://www.example.com/bar/gar/war?a=b"}, + {`\${FOO}`, `\`, "FOO", noDefaultDefined, noDefaultDefined}, + {`\\${FOO:-bar}`, `\\`, "FOO", ":-", "bar"}, + {`\\\${FOO:-bar}`, `\\\`, "FOO", ":-", "bar"}, + {`\\\\${FOO:-bar}`, `\\\\`, "FOO", ":-", "bar"}, + {"foo${FOO}", "", "FOO", noDefaultDefined, noDefaultDefined}, } - if _, err := file.WriteString(content); err != nil { - t.Fatalf("Error while writing to '%s': %v", name, err) - } + for _, tt := range tt { - if err := file.Close(); err != nil { - t.Fatalf("Error while closing '%s': %v", name, err) - } + e, v, s, d := capture(tt.in) - if err := os.Chmod(file.Name(), mode); err != nil { - t.Fatalf("Error 
while chmod '%s': %v", name, err) - } + assert.Equal(tt.e, e) + assert.Equal(tt.v, v) + assert.Equal(tt.s, s) + assert.Equal(tt.d, d) - return file.Name() + } } -func TestCreateBackup(t *testing.T) { +func TestDryRun(t *testing.T) { assert := assert.New(t) + handler := &Handler{ + DryRun: true, + } - file := _write(t, "create-backup.txt", "hello world", 0644) - defer _delete(t, file) - - backup := fmt.Sprintf("%s.bak", file) + var ( + file = "test/template1.txt" + ) - assert.False(_exists(backup)) + _redirect_logs(func(logs func() string, raw func() string) { - err := createBackup(file) - defer _delete(t, backup) + err := handler.parse(file) + assert.NoError(err) - assert.NoError(err) + assert.Equal(`Database1=${DATABASE} +Mode=${MODE} +Null=${NULL} +Database2=${DATABASE} +Database3=$NOT_A_VARIABLE +Database4=${ANOTHER_DATABASE:-db2.example.com} +Database5=${DATABASE:-db2.example.com} +`, _read(t, file)) - content := _read(t, backup) + assert.Equal(`Database1=db.example.com +Mode=debug +Null= +Database2=db.example.com +Database3=$NOT_A_VARIABLE +Database4=db2.example.com +Database5=db.example.com +`, raw()) - assert.Equal("hello world", content) - assert.Equal(filemode(file).String(), filemode(backup).String()) + }) } -func TestApplyNoGlobs(t *testing.T) { +func TestEscape(t *testing.T) { assert := assert.New(t) - var buf bytes.Buffer - - ErrorFunc = func(format string, args ...interface{}) { - fmt.Fprintf(&buf, format, args...) + var tt = []struct { + in, e string + }{ + {`foo`, notAnEscapeSequence}, + {`${FOO}`, notAnEscapeSequence}, + {`\${FOO}`, `${FOO}`}, + {`\\${FOO}`, notAnEscapeSequence}, + {`\\\${FOO:-bar}`, `\${FOO:-bar}`}, + {`\\\\${FOO}`, notAnEscapeSequence}, + {`\\\\\${FOO}`, `\\${FOO}`}, + {`\\\\\\${FOO}`, notAnEscapeSequence}, + {`\\\\\\\${FOO:-bar}`, `\\\${FOO:-bar}`}, } - tempdir, err := ioutil.TempDir("", "") - assert.NoError(err) - - defer os.Remove(tempdir) - - globs := []string{ - tempdir, - filepath.Join(tempdir, "*.not-here"), - filepath.Join(tempdir, "*.not-there"), + for _, tt := range tt { + esc := escape(tt.in) + assert.Equal(tt.e, esc) } - Apply(globs) - - msg := fmt.Sprintf("[ ERROR ] Zero files matched passed globs '[%s %s %s]'", - globs[0], - globs[1], - globs[2]) - - assert.Equal(msg, buf.String()) - } func TestFullParse(t *testing.T) { - Config.Backup = true - Config.DryRun = false - Config.Strict = false - Config.Verbose = true - - ErrorFunc = log.Panicf - assert := assert.New(t) + handler := &Handler{ + Backup: true, + } - file, _ := _template(t) - defer _delete(t, file) - - backup := fmt.Sprintf("%s.bak", file) + var ( + file = "test/template1.txt" + ) - err := parse(file) + defer _restore(file) + err := handler.parse(file) assert.NoError(err) - assert.True(_exists(backup)) + assert.Equal(`Database1=db.example.com - Mode=debug - Null= - Database2=db.example.com - Database3=$NOT_A_VARIABLE - Database4=db2.example.com - Database5=db.example.com`, _read(t, file)) +Mode=debug +Null= +Database2=db.example.com +Database3=$NOT_A_VARIABLE +Database4=db2.example.com +Database5=db.example.com +`, _read(t, file)) } func TestFullParseDefaults(t *testing.T) { - Config.Backup = true - Config.DryRun = false - Config.Strict = false - Config.Verbose = true - - ErrorFunc = log.Panicf - assert := assert.New(t) + handler := &Handler{ + Backup: true, + } - file, _ := _template_defaults(t) - defer _delete(t, file) - - backup := fmt.Sprintf("%s.bak", file) + var ( + file = "test/template2.txt" + ) - err := parse(file) + defer _restore(file) + err := handler.parse(file) 
assert.NoError(err) - assert.True(_exists(backup)) + assert.Equal(`Double1=db.example.com Double2=db.example.com - Double3=db.example.com Double4=db.example.com - DoubleDefault1=db2-example.com DoubleDefault2=db2-example.com - EmptyDefault=`, _read(t, file)) +Double3=db.example.com Double4=db.example.com +DoubleDefault1=db2-example.com DoubleDefault2=db2-example.com +EmptyDefault= +`, _read(t, file)) } func TestFullParseEscapes(t *testing.T) { - Config.Backup = true - Config.DryRun = false - Config.Strict = false - Config.Verbose = true - - ErrorFunc = log.Panicf - assert := assert.New(t) + handler := &Handler{ + Backup: true, + } - file, _ := _template_escaping(t) - defer _delete(t, file) - - backup := fmt.Sprintf("%s.bak", file) + var ( + file = "test/template3.txt" + ) - err := parse(file) + defer _restore(file) + err := handler.parse(file) assert.NoError(err) - assert.True(_exists(backup)) + assert.Equal(`db.example.com - Escaped1=${DATABASE} EscapedDefault1=${DATABASE:-db2.example.com} EscapedDefaultReplaced1=${ANOTHER_DATABASE:-db2.example.com} - NoEscape1=\db.example.com NoEscapeDefault1=\db.example.com NoEscapeDefaultReplaced1=\db2.example.com - Escaped2=\${DATABASE} EscapedDefault2=\${DATABASE:-db2.example.com} EscapedDefaultReplaced2=\${ANOTHER_DATABASE:-db2.example.com} - NoEscape2=\\db.example.com NoEscapeDefault2=\\db.example.com NoEscapeDefaultReplaced2=\\db2.example.com - Escaped3=\\${DATABASE} EscapedDefault3=\\${DATABASE:-db2.example.com} EscapedDefaultReplaced3=\\${ANOTHER_DATABASE:-db2.example.com} - NoEscape3=\\\db.example.com NoEscapeDefault3=\\\db.example.com NoEscapeDefaultReplaced3=\\\db2.example.com`, _read(t, file)) +Escaped1=${DATABASE} EscapedDefault1=${DATABASE:-db2.example.com} EscapedDefaultReplaced1=${ANOTHER_DATABASE:-db2.example.com} +NoEscape1=\db.example.com NoEscapeDefault1=\db.example.com NoEscapeDefaultReplaced1=\db2.example.com +Escaped2=\${DATABASE} EscapedDefault2=\${DATABASE:-db2.example.com} EscapedDefaultReplaced2=\${ANOTHER_DATABASE:-db2.example.com} +NoEscape2=\\db.example.com NoEscapeDefault2=\\db.example.com NoEscapeDefaultReplaced2=\\db2.example.com +Escaped3=\\${DATABASE} EscapedDefault3=\\${DATABASE:-db2.example.com} EscapedDefaultReplaced3=\\${ANOTHER_DATABASE:-db2.example.com} +NoEscape3=\\\db.example.com NoEscapeDefault3=\\\db.example.com NoEscapeDefaultReplaced3=\\\db2.example.com +`, _read(t, file)) } func TestStrictParse(t *testing.T) { - Config.Strict = true - - ErrorFunc = log.Panicf - - file, _ := _template(t) - defer _delete(t, file) - - assert.Panics(t, func() { parse(file) }) - -} - -func TestFilemode(t *testing.T) { - - file := _write(t, "filemode.text", "", 0654) - defer _delete(t, file) - - mode := filemode(file) - - assert.Equal(t, "-rw-r-xr--", mode.String()) - -} - -func TestCapture(t *testing.T) { - - assert := assert.New(t) - - var tt = []struct { - in, e, v, s, d string - }{ - {"${FOO}", "", "FOO", NoDefaultDefined, NoDefaultDefined}, - {"${FOO:-bar}", "", "FOO", ":-", "bar"}, - {"${FOO:-at the bar}", "", "FOO", ":-", "at the bar"}, - {"${FOO_3000:-near the bar}", "", "FOO_3000", ":-", "near the bar"}, - {"${FOO:--1}", "", "FOO", ":-", "-1"}, - {"${FOO:-http://www.example.com/bar/gar/war?a=b}", "", "FOO", ":-", "http://www.example.com/bar/gar/war?a=b"}, - {`\${FOO}`, `\`, "FOO", NoDefaultDefined, NoDefaultDefined}, - {`\\${FOO:-bar}`, `\\`, "FOO", ":-", "bar"}, - {`\\\${FOO:-bar}`, `\\\`, "FOO", ":-", "bar"}, - {`\\\\${FOO:-bar}`, `\\\\`, "FOO", ":-", "bar"}, - {"foo${FOO}", "", "FOO", NoDefaultDefined, 
NoDefaultDefined}, - } - - for _, tt := range tt { - - e, v, s, d := capture(tt.in) - - assert.Equal(tt.e, e) - assert.Equal(tt.v, v) - assert.Equal(tt.s, s) - assert.Equal(tt.d, d) - - } - -} - -func TestEscape(t *testing.T) { - assert := assert.New(t) - - var tt = []struct { - in, e string - }{ - {`foo`, NotAnEscapeSequence}, - {`${FOO}`, NotAnEscapeSequence}, - {`\${FOO}`, `${FOO}`}, - {`\\${FOO}`, NotAnEscapeSequence}, - {`\\\${FOO:-bar}`, `\${FOO:-bar}`}, - {`\\\\${FOO}`, NotAnEscapeSequence}, - {`\\\\\${FOO}`, `\\${FOO}`}, - {`\\\\\\${FOO}`, NotAnEscapeSequence}, - {`\\\\\\\${FOO:-bar}`, `\\\${FOO:-bar}`}, + handler := &Handler{ + Backup: true, + Strict: true, } - for _, tt := range tt { + var ( + file = "test/template4.txt" + ) - esc := escape(tt.in) + defer _restore(file) - assert.Equal(tt.e, esc) + err := handler.parse(file) + assert.Error(err) - } + assert.Equal("'test/template4.txt' requires undeclared environment variable 'ANOTHER_DATABASE', but cannot use default 'db2.example.com' (strict-mode)", err.Error()) } diff --git a/helper_test.go b/helper_test.go new file mode 100644 index 0000000..1403285 --- /dev/null +++ b/helper_test.go @@ -0,0 +1,119 @@ +package envplate + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "log" + "os" + "testing" +) + +func _delete(t *testing.T, name string) { + + if err := os.Remove(name); err != nil { + t.Fatal(fmt.Errorf("error while deleting '%s': %v", name, err)) + } + +} + +func _exists(name string) bool { + _, err := os.Stat(name) + return err == nil +} + +func _read(t *testing.T, name string) string { + + content, err := ioutil.ReadFile(name) + + if err != nil { + t.Fatal(fmt.Errorf("error while reading '%s': %v", name, err)) + } + + return string(content) + +} + +func _redirect_logs(f func(func() string, func() string)) { + + var ( + logBuf bytes.Buffer + rawBuf bytes.Buffer + ) + + defer func(w io.Writer) { + Logger.Out = w + }(Logger.Out) + + defer func(w io.Writer) { + log.SetOutput(w) + }(os.Stderr) + + log.SetOutput(&logBuf) + Logger.Out = &rawBuf + + logs := func(buf *bytes.Buffer) func() string { + + return func() string { + + logs := buf.String() + buf.Reset() + + return logs + + } + + } + + f(logs(&logBuf), logs(&rawBuf)) + +} + +func _restore(file string) { + + backup := fmt.Sprintf("%s.bak", file) + + os.Remove(file) + os.Link(backup, file) + os.Remove(backup) + +} + +func _tmpdir(t *testing.T, f func(string)) { + + dir, err := ioutil.TempDir("", "") + + if err != nil { + t.Fatal(err) + } + + defer os.RemoveAll(dir) + + f(dir) + +} + +func _write(t *testing.T, name, content string, mode os.FileMode) string { + + file, err := ioutil.TempFile("", name) + + if err != nil { + t.Fatalf("Error while opening '%s': %v", name, err) + } + + if _, err := file.WriteString(content); err != nil { + t.Fatalf("Error while writing to '%s': %v", name, err) + } + + if err := file.Close(); err != nil { + t.Fatalf("Error while closing '%s': %v", name, err) + } + + if err := os.Chmod(file.Name(), mode); err != nil { + t.Fatalf("Error while chmod '%s': %v", name, err) + } + + return file.Name() + +} diff --git a/logger.go b/logger.go index ca5538b..4aa4ba5 100644 --- a/logger.go +++ b/logger.go @@ -1,35 +1,63 @@ package envplate import ( + "errors" "fmt" - "log" + "io" + _log "log" + "os" ) -type logLevel string - const ( - DEBUG logLevel = "DEBUG" - INFO = "INFO" - ERROR = "ERROR" -) -var ( - DebugAndInfoFunc = log.Printf - ErrorFunc = log.Fatalf + // RAW indicates a log message should be send directly to stdout + RAW logLevel = "RAW" + + // DEBUG 
indicates log message that are only visible when the verbose flag it set + DEBUG = "DEBUG" + + // INFO indicates regular log messages + INFO = "INFO" + + // ERROR indicates error log messages + ERROR = "ERROR" ) -func Log(lvl logLevel, msg string, args ...interface{}) { +type logLevel string + +type logger struct { + Out io.Writer + Verbose bool +} - if lvl == DEBUG && !Config.Verbose { - return +// Log exposes a logger with a custom envplate formatting syntax +var Logger = &logger{Out: os.Stdout} + +// Log emits log messages and filters based on the set verbosity - if an error +// is logged, the error msg is returned as error object +func Log(lvl logLevel, msg string, args ...interface{}) error { + return Logger.log(lvl, msg, args...) +} + +func (l *logger) log(lvl logLevel, msg string, args ...interface{}) error { + + if lvl == DEBUG && !l.Verbose { + return nil } - msg = fmt.Sprintf("[ %s ] %s", lvl, msg) + msg = fmt.Sprintf(msg, args...) + + if lvl == RAW { + fmt.Fprintf(l.Out, msg) + return nil + } + + _log.Printf("[ %s ] %s", lvl, msg) if lvl == ERROR { - ErrorFunc(msg, args...) - } else { - DebugAndInfoFunc(msg, args...) + return errors.New(msg) } + return nil + } diff --git a/logger_test.go b/logger_test.go index 84ee984..e2071f5 100644 --- a/logger_test.go +++ b/logger_test.go @@ -1,66 +1,60 @@ package envplate import ( - "bytes" - "log" - "os" + "errors" "testing" "github.com/stretchr/testify/assert" ) -func _redirect_logs(f func(func() string)) { - - var buf bytes.Buffer - - log.SetOutput(&buf) - - logs := func() string { - - logs := buf.String() - buf.Reset() - - return logs - - } - - f(logs) - - log.SetOutput(os.Stderr) - -} - func TestLog(t *testing.T) { - Config.Verbose = false + reset := func(v bool) { + Logger.Verbose = v + } - ErrorFunc = log.Panicf + defer reset(Logger.Verbose) + reset(false) - _redirect_logs(func(logs func() string) { + _redirect_logs(func(logs func() string, raw func() string) { assert := assert.New(t) - Log(DEBUG, "Hello debug world") + err := Log(RAW, "Hello raw world") + assert.NoError(err) + assert.Empty(logs()) + assert.Equal(raw(), "Hello raw world") + + err = Log(DEBUG, "Hello debug world") + assert.NoError(err) assert.Empty(logs()) - Log(INFO, "Hello info world") + err = Log(INFO, "Hello info world") + assert.NoError(err) assert.NotEmpty(logs()) - assert.Panics(func() { Log(ERROR, "Hello error world") }) + err = Log(ERROR, "Hello error world") + assert.Error(err) + assert.Equal(errors.New("Hello error world"), err) assert.NotEmpty(logs()) - Config.Verbose = true + Logger.Verbose = true - Log(DEBUG, "Hello debug world") + err = Log(DEBUG, "Hello debug world") + assert.NoError(err) assert.Regexp(".+ \\[ DEBUG \\] Hello debug world", logs()) - Log(DEBUG, "Hello %s %s", "debug", "world") + err = Log(DEBUG, "Hello %s %s", "debug", "world") + assert.NoError(err) assert.Regexp(".+ \\[ DEBUG \\] Hello debug world", logs()) - Log(INFO, "Hello info world") + err = Log(INFO, "Hello info world") + assert.NoError(err) assert.Regexp(".+ \\[ INFO \\] Hello info world", logs()) - assert.Panics(func() { Log(ERROR, "Hello error world") }) + err = Log(ERROR, "Hello error world") + assert.Error(err) + assert.Equal(errors.New("Hello error world"), err) assert.Regexp(".+ \\[ ERROR \\] Hello error world", logs()) }) diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 0000000..751553b --- /dev/null +++ b/test/.gitignore @@ -0,0 +1 @@ +*.bak diff --git a/test/filemode.txt b/test/filemode.txt new file mode 100755 index 0000000..3b18e51 --- 
/dev/null +++ b/test/filemode.txt @@ -0,0 +1 @@ +hello world diff --git a/test/template1.txt b/test/template1.txt new file mode 100644 index 0000000..87fd076 --- /dev/null +++ b/test/template1.txt @@ -0,0 +1,7 @@ +Database1=${DATABASE} +Mode=${MODE} +Null=${NULL} +Database2=${DATABASE} +Database3=$NOT_A_VARIABLE +Database4=${ANOTHER_DATABASE:-db2.example.com} +Database5=${DATABASE:-db2.example.com} diff --git a/test/template2.txt b/test/template2.txt new file mode 100644 index 0000000..75c9b57 --- /dev/null +++ b/test/template2.txt @@ -0,0 +1,4 @@ +Double1=${DATABASE} Double2=${DATABASE} +Double3=${DATABASE:-db2.example.com} Double4=${DATABASE:-db2.example.com} +DoubleDefault1=${ANOTHER_DATABASE:-db2-example.com} DoubleDefault2=${ANOTHER_DATABASE:-db2-example.com} +EmptyDefault=${UNDEFINED_VARIABLE:-} diff --git a/test/template3.txt b/test/template3.txt new file mode 100644 index 0000000..1157114 --- /dev/null +++ b/test/template3.txt @@ -0,0 +1,7 @@ +${DATABASE} +Escaped1=\${DATABASE} EscapedDefault1=\${DATABASE:-db2.example.com} EscapedDefaultReplaced1=\${ANOTHER_DATABASE:-db2.example.com} +NoEscape1=\\${DATABASE} NoEscapeDefault1=\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced1=\\${ANOTHER_DATABASE:-db2.example.com} +Escaped2=\\\${DATABASE} EscapedDefault2=\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced2=\\\${ANOTHER_DATABASE:-db2.example.com} +NoEscape2=\\\\${DATABASE} NoEscapeDefault2=\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced2=\\\\${ANOTHER_DATABASE:-db2.example.com} +Escaped3=\\\\\${DATABASE} EscapedDefault3=\\\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced3=\\\\\${ANOTHER_DATABASE:-db2.example.com} +NoEscape3=\\\\\\${DATABASE} NoEscapeDefault3=\\\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced3=\\\\\\${ANOTHER_DATABASE:-db2.example.com} diff --git a/test/template4.txt b/test/template4.txt new file mode 100644 index 0000000..1157114 --- /dev/null +++ b/test/template4.txt @@ -0,0 +1,7 @@ +${DATABASE} +Escaped1=\${DATABASE} EscapedDefault1=\${DATABASE:-db2.example.com} EscapedDefaultReplaced1=\${ANOTHER_DATABASE:-db2.example.com} +NoEscape1=\\${DATABASE} NoEscapeDefault1=\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced1=\\${ANOTHER_DATABASE:-db2.example.com} +Escaped2=\\\${DATABASE} EscapedDefault2=\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced2=\\\${ANOTHER_DATABASE:-db2.example.com} +NoEscape2=\\\\${DATABASE} NoEscapeDefault2=\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced2=\\\\${ANOTHER_DATABASE:-db2.example.com} +Escaped3=\\\\\${DATABASE} EscapedDefault3=\\\\\${DATABASE:-db2.example.com} EscapedDefaultReplaced3=\\\\\${ANOTHER_DATABASE:-db2.example.com} +NoEscape3=\\\\\\${DATABASE} NoEscapeDefault3=\\\\\\${DATABASE:-db2.example.com} NoEscapeDefaultReplaced3=\\\\\\${ANOTHER_DATABASE:-db2.example.com} diff --git a/util.go b/util.go new file mode 100644 index 0000000..2257f9e --- /dev/null +++ b/util.go @@ -0,0 +1,49 @@ +package envplate + +import ( + "fmt" + "io" + "os" +) + +func createBackup(file string) error { + + source, err := os.Open(file) + + if err != nil { + return err + } + + defer source.Close() + + target, err := os.Create(fmt.Sprintf("%s.bak", file)) + + if err != nil { + return err + } + + defer target.Close() + + if _, err := io.Copy(target, source); err != nil { + return err + } else if mode, err := filemode(source.Name()); err != nil { + return err + } else if err := os.Chmod(target.Name(), mode); err != nil { + return err + } + + return nil + +} + +func filemode(file string) (os.FileMode, error) { + 
+ fileinfo, err := os.Stat(file) + + if err != nil { + return 0, fmt.Errorf("cannot stat '%s': %v", file, err) + } + + return fileinfo.Mode(), nil + +} diff --git a/util_test.go b/util_test.go new file mode 100644 index 0000000..02a38be --- /dev/null +++ b/util_test.go @@ -0,0 +1,66 @@ +package envplate + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + testBackupFile = "test/filemode.txt.bak" + testFile = "test/filemode.txt" +) + +func TestBackup(t *testing.T) { + + defer _delete(t, testBackupFile) + + assert := assert.New(t) + + modeBackup, err := filemode(testBackupFile) + assert.Error(err) + + modeFile, err := filemode(testFile) + assert.NoError(err) + + assert.Equal("hello world\n", _read(t, testFile)) + + assert.NoError(createBackup(testFile)) + + modeBackup, err = filemode(testBackupFile) + assert.NoError(err) + + modeFile, err = filemode(testFile) + assert.NoError(err) + + assert.Equal(modeBackup.String(), modeFile.String()) + assert.Equal(_read(t, testFile), _read(t, testBackupFile)) + +} + +func TestBackupErrors(t *testing.T) { + + assert := assert.New(t) + + _tmpdir(t, func(dir string) { + + err := createBackup(dir) + assert.Error(err) + + }) + +} + +func TestFilemode(t *testing.T) { + + assert := assert.New(t) + + mode, err := filemode(testFile) + + assert.NoError(err) + assert.Equal("-rwxrw-r-x", mode.String()) + + mode, err = filemode("test/nofile.txt") + assert.Error(err) + +} diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE new file mode 100644 index 0000000..2a7cfd2 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2012-2013 Dave Collins + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go new file mode 100644 index 0000000..bc26e7e --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/common.go @@ -0,0 +1,344 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ */ + +package spew + +import ( + "fmt" + "io" + "reflect" + "sort" + "strconv" + "unsafe" +) + +// offsetPtr, offsetScalar, and offsetFlag are the offsets for the internal +// reflect.Value fields. +var offsetPtr, offsetScalar, offsetFlag uintptr + +// reflectValueOld mirrors the struct layout of the reflect package Value type +// before golang commit ecccf07e7f9d. +var reflectValueOld struct { + typ unsafe.Pointer + val unsafe.Pointer + flag uintptr +} + +// reflectValueNew mirrors the struct layout of the reflect package Value type +// after golang commit ecccf07e7f9d. +var reflectValueNew struct { + typ unsafe.Pointer + ptr unsafe.Pointer + scalar uintptr + flag uintptr +} + +func init() { + // Older versions of reflect.Value stored small integers directly in the + // ptr field (which is named val in the older versions). Newer versions + // added a new field named scalar for this purpose which unfortuantely + // comes before the flag field. Further the new field is before the + // flag field, so the offset of the flag field is different as well. + // This code constructs a new reflect.Value from a known small integer + // and checks if the val field within it matches. When it matches, the + // old style reflect.Value is being used. Otherwise it's the new style. + v := 0xf00 + vv := reflect.ValueOf(v) + upv := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + + unsafe.Offsetof(reflectValueOld.val)) + + // Assume the old style by default. + offsetPtr = unsafe.Offsetof(reflectValueOld.val) + offsetScalar = 0 + offsetFlag = unsafe.Offsetof(reflectValueOld.flag) + + // Use the new style offsets if the ptr field doesn't match the value + // since it must be in the new scalar field. + if int(*(*uintptr)(upv)) != v { + offsetPtr = unsafe.Offsetof(reflectValueNew.ptr) + offsetScalar = unsafe.Offsetof(reflectValueNew.scalar) + offsetFlag = unsafe.Offsetof(reflectValueNew.flag) + } +} + +// flagIndir indicates whether the value field of a reflect.Value is the actual +// data or a pointer to the data. +const flagIndir = 1 << 1 + +// unsafeReflectValue converts the passed reflect.Value into a one that bypasses +// the typical safety restrictions preventing access to unaddressable and +// unexported data. It works by digging the raw pointer to the underlying +// value out of the protected value and generating a new unprotected (unsafe) +// reflect.Value to it. +// +// This allows us to check for implementations of the Stringer and error +// interfaces to be used for pretty printing ordinarily unaddressable and +// inaccessible values such as unexported struct fields. +func unsafeReflectValue(v reflect.Value) (rv reflect.Value) { + indirects := 1 + vt := v.Type() + upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr) + rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag)) + if rvf&flagIndir != 0 { + vt = reflect.PtrTo(v.Type()) + indirects++ + } else if offsetScalar != 0 { + upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetScalar) + } + + pv := reflect.NewAt(vt, upv) + rv = pv + for i := 0; i < indirects; i++ { + rv = rv.Elem() + } + return rv +} + +// Some constants in the form of bytes to avoid string overhead. This mirrors +// the technique used in the fmt package. 
+var ( + panicBytes = []byte("(PANIC=") + plusBytes = []byte("+") + iBytes = []byte("i") + trueBytes = []byte("true") + falseBytes = []byte("false") + interfaceBytes = []byte("(interface {})") + commaNewlineBytes = []byte(",\n") + newlineBytes = []byte("\n") + openBraceBytes = []byte("{") + openBraceNewlineBytes = []byte("{\n") + closeBraceBytes = []byte("}") + asteriskBytes = []byte("*") + colonBytes = []byte(":") + colonSpaceBytes = []byte(": ") + openParenBytes = []byte("(") + closeParenBytes = []byte(")") + spaceBytes = []byte(" ") + pointerChainBytes = []byte("->") + nilAngleBytes = []byte("<nil>") + maxNewlineBytes = []byte("<max depth reached>\n") + maxShortBytes = []byte("<max>") + circularBytes = []byte("<already shown>") + circularShortBytes = []byte("<shown>") + invalidAngleBytes = []byte("<invalid>") + openBracketBytes = []byte("[") + closeBracketBytes = []byte("]") + percentBytes = []byte("%") + precisionBytes = []byte(".") + openAngleBytes = []byte("<") + closeAngleBytes = []byte(">") + openMapBytes = []byte("map[") + closeMapBytes = []byte("]") +) + +// hexDigits is used to map a decimal value to a hex digit. +var hexDigits = "0123456789abcdef" + +// catchPanic handles any panics that might occur during the handleMethods +// calls. +func catchPanic(w io.Writer, v reflect.Value) { + if err := recover(); err != nil { + w.Write(panicBytes) + fmt.Fprintf(w, "%v", err) + w.Write(closeParenBytes) + } +} + +// handleMethods attempts to call the Error and String methods on the underlying +// type the passed reflect.Value represents and outputes the result to Writer w. +// +// It handles panics in any called methods by catching and displaying the error +// as the formatted value. +func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) { + // We need an interface to check if the type implements the error or + // Stringer interface. However, the reflect package won't give us an + // interface on certain things like unexported struct fields in order + // to enforce visibility rules. We use unsafe to bypass these restrictions + // since this package does not mutate the values. + if !v.CanInterface() { + v = unsafeReflectValue(v) + } + + // Choose whether or not to do error and Stringer interface lookups against + // the base type or a pointer to the base type depending on settings. + // Technically calling one of these methods with a pointer receiver can + // mutate the value, however, types which choose to satisify an error or + // Stringer interface with a pointer receiver should not be mutating their + // state inside these interface methods. + var viface interface{} + if !cs.DisablePointerMethods { + if !v.CanAddr() { + v = unsafeReflectValue(v) + } + viface = v.Addr().Interface() + } else { + if v.CanAddr() { + v = v.Addr() + } + viface = v.Interface() + } + + // Is it an error or Stringer? + switch iface := viface.(type) { + case error: + defer catchPanic(w, v) + if cs.ContinueOnMethod { + w.Write(openParenBytes) + w.Write([]byte(iface.Error())) + w.Write(closeParenBytes) + w.Write(spaceBytes) + return false + } + + w.Write([]byte(iface.Error())) + return true + + case fmt.Stringer: + defer catchPanic(w, v) + if cs.ContinueOnMethod { + w.Write(openParenBytes) + w.Write([]byte(iface.String())) + w.Write(closeParenBytes) + w.Write(spaceBytes) + return false + } + w.Write([]byte(iface.String())) + return true + } + return false +} + +// printBool outputs a boolean value as true or false to Writer w. 
+func printBool(w io.Writer, val bool) { + if val { + w.Write(trueBytes) + } else { + w.Write(falseBytes) + } +} + +// printInt outputs a signed integer value to Writer w. +func printInt(w io.Writer, val int64, base int) { + w.Write([]byte(strconv.FormatInt(val, base))) +} + +// printUint outputs an unsigned integer value to Writer w. +func printUint(w io.Writer, val uint64, base int) { + w.Write([]byte(strconv.FormatUint(val, base))) +} + +// printFloat outputs a floating point value using the specified precision, +// which is expected to be 32 or 64bit, to Writer w. +func printFloat(w io.Writer, val float64, precision int) { + w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision))) +} + +// printComplex outputs a complex value using the specified float precision +// for the real and imaginary parts to Writer w. +func printComplex(w io.Writer, c complex128, floatPrecision int) { + r := real(c) + w.Write(openParenBytes) + w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision))) + i := imag(c) + if i >= 0 { + w.Write(plusBytes) + } + w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision))) + w.Write(iBytes) + w.Write(closeParenBytes) +} + +// printHexPtr outputs a uintptr formatted as hexidecimal with a leading '0x' +// prefix to Writer w. +func printHexPtr(w io.Writer, p uintptr) { + // Null pointer. + num := uint64(p) + if num == 0 { + w.Write(nilAngleBytes) + return + } + + // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix + buf := make([]byte, 18) + + // It's simpler to construct the hex string right to left. + base := uint64(16) + i := len(buf) - 1 + for num >= base { + buf[i] = hexDigits[num%base] + num /= base + i-- + } + buf[i] = hexDigits[num] + + // Add '0x' prefix. + i-- + buf[i] = 'x' + i-- + buf[i] = '0' + + // Strip unused leading bytes. + buf = buf[i:] + w.Write(buf) +} + +// valuesSorter implements sort.Interface to allow a slice of reflect.Value +// elements to be sorted. +type valuesSorter struct { + values []reflect.Value +} + +// Len returns the number of values in the slice. It is part of the +// sort.Interface implementation. +func (s *valuesSorter) Len() int { + return len(s.values) +} + +// Swap swaps the values at the passed indices. It is part of the +// sort.Interface implementation. +func (s *valuesSorter) Swap(i, j int) { + s.values[i], s.values[j] = s.values[j], s.values[i] +} + +// Less returns whether the value at index i should sort before the +// value at index j. It is part of the sort.Interface implementation. +func (s *valuesSorter) Less(i, j int) bool { + switch s.values[i].Kind() { + case reflect.Bool: + return !s.values[i].Bool() && s.values[j].Bool() + case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: + return s.values[i].Int() < s.values[j].Int() + case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: + return s.values[i].Uint() < s.values[j].Uint() + case reflect.Float32, reflect.Float64: + return s.values[i].Float() < s.values[j].Float() + case reflect.String: + return s.values[i].String() < s.values[j].String() + case reflect.Uintptr: + return s.values[i].Uint() < s.values[j].Uint() + } + return s.values[i].String() < s.values[j].String() +} + +// sortValues is a generic sort function for native types: int, uint, bool, +// string and uintptr. Other inputs are sorted according to their +// Value.String() value to ensure display stability. 
+func sortValues(values []reflect.Value) { + if len(values) == 0 { + return + } + sort.Sort(&valuesSorter{values}) +} diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go new file mode 100644 index 0000000..e516675 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/config.go @@ -0,0 +1,288 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +package spew + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// ConfigState houses the configuration options used by spew to format and +// display values. There is a global instance, Config, that is used to control +// all top-level Formatter and Dump functionality. Each ConfigState instance +// provides methods equivalent to the top-level functions. +// +// The zero value for ConfigState provides no indentation. You would typically +// want to set it to a space or a tab. +// +// Alternatively, you can use NewDefaultConfig to get a ConfigState instance +// with default settings. See the documentation of NewDefaultConfig for default +// values. +type ConfigState struct { + // Indent specifies the string to use for each indentation level. The + // global config instance that all top-level functions use set this to a + // single space by default. If you would like more indentation, you might + // set this to a tab with "\t" or perhaps two spaces with " ". + Indent string + + // MaxDepth controls the maximum number of levels to descend into nested + // data structures. The default, 0, means there is no limit. + // + // NOTE: Circular data structures are properly detected, so it is not + // necessary to set this value unless you specifically want to limit deeply + // nested data structures. + MaxDepth int + + // DisableMethods specifies whether or not error and Stringer interfaces are + // invoked for types that implement them. + DisableMethods bool + + // DisablePointerMethods specifies whether or not to check for and invoke + // error and Stringer interfaces on types which only accept a pointer + // receiver when the current type is not a pointer. + // + // NOTE: This might be an unsafe action since calling one of these methods + // with a pointer receiver could technically mutate the value, however, + // in practice, types which choose to satisify an error or Stringer + // interface with a pointer receiver should not be mutating their state + // inside these interface methods. + DisablePointerMethods bool + + // ContinueOnMethod specifies whether or not recursion should continue once + // a custom error or Stringer interface is invoked. The default, false, + // means it will print the results of invoking the custom error or Stringer + // interface and return immediately instead of continuing to recurse into + // the internals of the data type. 
+ // + // NOTE: This flag does not have any effect if method invocation is disabled + // via the DisableMethods or DisablePointerMethods options. + ContinueOnMethod bool + + // SortKeys specifies map keys should be sorted before being printed. Use + // this to have a more deterministic, diffable output. Note that only + // native types (bool, int, uint, floats, uintptr and string) are supported + // with other types sorted according to the reflect.Value.String() output + // which guarantees display stability. + SortKeys bool +} + +// Config is the active configuration of the top-level functions. +// The configuration can be changed by modifying the contents of spew.Config. +var Config = ConfigState{Indent: " "} + +// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the formatted string as a value that satisfies error. See NewFormatter +// for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) { + return fmt.Errorf(format, c.convertArgs(a)...) +} + +// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) { + return fmt.Fprint(w, c.convertArgs(a)...) +} + +// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { + return fmt.Fprintf(w, format, c.convertArgs(a)...) +} + +// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it +// passed with a Formatter interface returned by c.NewFormatter. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { + return fmt.Fprintln(w, c.convertArgs(a)...) +} + +// Print is a wrapper for fmt.Print that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Print(c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Print(a ...interface{}) (n int, err error) { + return fmt.Print(c.convertArgs(a)...) +} + +// Printf is a wrapper for fmt.Printf that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. 
+// +// This function is shorthand for the following syntax: +// +// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) { + return fmt.Printf(format, c.convertArgs(a)...) +} + +// Println is a wrapper for fmt.Println that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Println(c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Println(a ...interface{}) (n int, err error) { + return fmt.Println(c.convertArgs(a)...) +} + +// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the resulting string. See NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Sprint(a ...interface{}) string { + return fmt.Sprint(c.convertArgs(a)...) +} + +// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were +// passed with a Formatter interface returned by c.NewFormatter. It returns +// the resulting string. See NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Sprintf(format string, a ...interface{}) string { + return fmt.Sprintf(format, c.convertArgs(a)...) +} + +// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it +// were passed with a Formatter interface returned by c.NewFormatter. It +// returns the resulting string. See NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b)) +func (c *ConfigState) Sprintln(a ...interface{}) string { + return fmt.Sprintln(c.convertArgs(a)...) +} + +/* +NewFormatter returns a custom formatter that satisfies the fmt.Formatter +interface. As a result, it integrates cleanly with standard fmt package +printing functions. The formatter is useful for inline printing of smaller data +types similar to the standard %v format specifier. + +The custom formatter only responds to the %v (most compact), %+v (adds pointer +addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb +combinations. Any other verbs such as %x and %q will be sent to the the +standard fmt package for formatting. In addition, the custom formatter ignores +the width and precision arguments (however they will still work on the format +specifiers not handled by the custom formatter). + +Typically this function shouldn't be called directly. It is much easier to make +use of the custom formatter by calling one of the convenience functions such as +c.Printf, c.Println, or c.Printf. +*/ +func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter { + return newFormatter(c, v) +} + +// Fdump formats and displays the passed arguments to io.Writer w. It formats +// exactly the same as Dump. +func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) { + fdump(c, w, a...) 
+} + +/* +Dump displays the passed parameters to standard out with newlines, customizable +indentation, and additional debug information such as complete types and all +pointer addresses used to indirect to the final value. It provides the +following features over the built-in printing facilities provided by the fmt +package: + + * Pointers are dereferenced and followed + * Circular data structures are detected and handled properly + * Custom Stringer/error interfaces are optionally invoked, including + on unexported types + * Custom types which only implement the Stringer/error interfaces via + a pointer receiver are optionally invoked when passing non-pointer + variables + * Byte arrays and slices are dumped like the hexdump -C command which + includes offsets, byte values in hex, and ASCII output + +The configuration options are controlled by modifying the public members +of c. See ConfigState for options documentation. + +See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to +get the formatted result as a string. +*/ +func (c *ConfigState) Dump(a ...interface{}) { + fdump(c, os.Stdout, a...) +} + +// Sdump returns a string with the passed arguments formatted exactly the same +// as Dump. +func (c *ConfigState) Sdump(a ...interface{}) string { + var buf bytes.Buffer + fdump(c, &buf, a...) + return buf.String() +} + +// convertArgs accepts a slice of arguments and returns a slice of the same +// length with each argument converted to a spew Formatter interface using +// the ConfigState associated with s. +func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) { + formatters = make([]interface{}, len(args)) + for index, arg := range args { + formatters[index] = newFormatter(c, arg) + } + return formatters +} + +// NewDefaultConfig returns a ConfigState with the following default settings. +// +// Indent: " " +// MaxDepth: 0 +// DisableMethods: false +// DisablePointerMethods: false +// ContinueOnMethod: false +// SortKeys: false +func NewDefaultConfig() *ConfigState { + return &ConfigState{Indent: " "} +} diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go new file mode 100644 index 0000000..2a0fb60 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/doc.go @@ -0,0 +1,196 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* +Package spew implements a deep pretty printer for Go data structures to aid in +debugging. 
+ +A quick overview of the additional features spew provides over the built-in +printing facilities for Go data types are as follows: + + * Pointers are dereferenced and followed + * Circular data structures are detected and handled properly + * Custom Stringer/error interfaces are optionally invoked, including + on unexported types + * Custom types which only implement the Stringer/error interfaces via + a pointer receiver are optionally invoked when passing non-pointer + variables + * Byte arrays and slices are dumped like the hexdump -C command which + includes offsets, byte values in hex, and ASCII output (only when using + Dump style) + +There are two different approaches spew allows for dumping Go data structures: + + * Dump style which prints with newlines, customizable indentation, + and additional debug information such as types and all pointer addresses + used to indirect to the final value + * A custom Formatter interface that integrates cleanly with the standard fmt + package and replaces %v, %+v, %#v, and %#+v to provide inline printing + similar to the default %v while providing the additional functionality + outlined above and passing unsupported format verbs such as %x and %q + along to fmt + +Quick Start + +This section demonstrates how to quickly get started with spew. See the +sections below for further details on formatting and configuration options. + +To dump a variable with full newlines, indentation, type, and pointer +information use Dump, Fdump, or Sdump: + spew.Dump(myVar1, myVar2, ...) + spew.Fdump(someWriter, myVar1, myVar2, ...) + str := spew.Sdump(myVar1, myVar2, ...) + +Alternatively, if you would prefer to use format strings with a compacted inline +printing style, use the convenience wrappers Printf, Fprintf, etc with +%v (most compact), %+v (adds pointer addresses), %#v (adds types), or +%#+v (adds types and pointer addresses): + spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) + spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) + spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) + spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) + +Configuration Options + +Configuration of spew is handled by fields in the ConfigState type. For +convenience, all of the top-level functions use a global state available +via the spew.Config global. + +It is also possible to create a ConfigState instance that provides methods +equivalent to the top-level functions. This allows concurrent configuration +options. See the ConfigState documentation for more details. + +The following configuration options are available: + * Indent + String to use for each indentation level for Dump functions. + It is a single space by default. A popular alternative is "\t". + + * MaxDepth + Maximum number of levels to descend into nested data structures. + There is no limit by default. + + * DisableMethods + Disables invocation of error and Stringer interface methods. + Method invocation is enabled by default. + + * DisablePointerMethods + Disables invocation of error and Stringer interface methods on types + which only accept pointer receivers from non-pointer variables. + Pointer method invocation is enabled by default. + + * ContinueOnMethod + Enables recursion into types after invoking error and Stringer interface + methods. Recursion after method invocation is disabled by default. + + * SortKeys + Specifies map keys should be sorted before being printed. Use + this to have a more deterministic, diffable output. 
Note that + only native types (bool, int, uint, floats, uintptr and string) + are supported with other types sorted according to the + reflect.Value.String() output which guarantees display stability. + Natural map order is used by default. + +Dump Usage + +Simply call spew.Dump with a list of variables you want to dump: + + spew.Dump(myVar1, myVar2, ...) + +You may also call spew.Fdump if you would prefer to output to an arbitrary +io.Writer. For example, to dump to standard error: + + spew.Fdump(os.Stderr, myVar1, myVar2, ...) + +A third option is to call spew.Sdump to get the formatted output as a string: + + str := spew.Sdump(myVar1, myVar2, ...) + +Sample Dump Output + +See the Dump example for details on the setup of the types and variables being +shown here. + + (main.Foo) { + unexportedField: (*main.Bar)(0xf84002e210)({ + flag: (main.Flag) flagTwo, + data: (uintptr) + }), + ExportedField: (map[interface {}]interface {}) { + (string) "one": (bool) true + } + } + +Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C +command as shown. + ([]uint8) { + 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... | + 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0| + 00000020 31 32 |12| + } + +Custom Formatter + +Spew provides a custom formatter the implements the fmt.Formatter interface +so that it integrates cleanly with standard fmt package printing functions. The +formatter is useful for inline printing of smaller data types similar to the +standard %v format specifier. + +The custom formatter only responds to the %v (most compact), %+v (adds pointer +addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb +combinations. Any other verbs such as %x and %q will be sent to the the +standard fmt package for formatting. In addition, the custom formatter ignores +the width and precision arguments (however they will still work on the format +specifiers not handled by the custom formatter). + +Custom Formatter Usage + +The simplest way to make use of the spew custom formatter is to call one of the +convenience functions such as spew.Printf, spew.Println, or spew.Printf. The +functions have syntax you are most likely already familiar with: + + spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) + spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) + spew.Println(myVar, myVar2) + spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) + spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) + +See the Index for the full list convenience functions. + +Sample Formatter Output + +Double pointer to a uint8: + %v: <**>5 + %+v: <**>(0xf8400420d0->0xf8400420c8)5 + %#v: (**uint8)5 + %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5 + +Pointer to circular struct with a uint8 field and a pointer to itself: + %v: <*>{1 <*>} + %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)} + %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)} + %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)} + +See the Printf example for details on the setup of variables being shown +here. + +Errors + +Since it is possible for custom Stringer/error interfaces to panic, spew +detects them and handles them internally by printing the panic information +inline with the output. Since spew is intended to provide deep pretty printing +capabilities on structures, it intentionally does not return any errors. 
+*/ +package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go new file mode 100644 index 0000000..3d57306 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/dump.go @@ -0,0 +1,474 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +package spew + +import ( + "bytes" + "encoding/hex" + "fmt" + "io" + "os" + "reflect" + "regexp" + "strconv" + "strings" +) + +var ( + // uint8Type is a reflect.Type representing a uint8. It is used to + // convert cgo types to uint8 slices for hexdumping. + uint8Type = reflect.TypeOf(uint8(0)) + + // cCharRE is a regular expression that matches a cgo char. + // It is used to detect character arrays to hexdump them. + cCharRE = regexp.MustCompile("^.*\\._Ctype_char$") + + // cUnsignedCharRE is a regular expression that matches a cgo unsigned + // char. It is used to detect unsigned character arrays to hexdump + // them. + cUnsignedCharRE = regexp.MustCompile("^.*\\._Ctype_unsignedchar$") + + // cUint8tCharRE is a regular expression that matches a cgo uint8_t. + // It is used to detect uint8_t arrays to hexdump them. + cUint8tCharRE = regexp.MustCompile("^.*\\._Ctype_uint8_t$") +) + +// dumpState contains information about the state of a dump operation. +type dumpState struct { + w io.Writer + depth int + pointers map[uintptr]int + ignoreNextType bool + ignoreNextIndent bool + cs *ConfigState +} + +// indent performs indentation according to the depth level and cs.Indent +// option. +func (d *dumpState) indent() { + if d.ignoreNextIndent { + d.ignoreNextIndent = false + return + } + d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth)) +} + +// unpackValue returns values inside of non-nil interfaces when possible. +// This is useful for data types like structs, arrays, slices, and maps which +// can contain varying types packed inside an interface. +func (d *dumpState) unpackValue(v reflect.Value) reflect.Value { + if v.Kind() == reflect.Interface && !v.IsNil() { + v = v.Elem() + } + return v +} + +// dumpPtr handles formatting of pointers by indirecting them as necessary. +func (d *dumpState) dumpPtr(v reflect.Value) { + // Remove pointers at or below the current depth from map used to detect + // circular refs. + for k, depth := range d.pointers { + if depth >= d.depth { + delete(d.pointers, k) + } + } + + // Keep list of all dereferenced pointers to show later. + pointerChain := make([]uintptr, 0) + + // Figure out how many levels of indirection there are by dereferencing + // pointers and unpacking interfaces down the chain while detecting circular + // references. 
+ nilFound := false + cycleFound := false + indirects := 0 + ve := v + for ve.Kind() == reflect.Ptr { + if ve.IsNil() { + nilFound = true + break + } + indirects++ + addr := ve.Pointer() + pointerChain = append(pointerChain, addr) + if pd, ok := d.pointers[addr]; ok && pd < d.depth { + cycleFound = true + indirects-- + break + } + d.pointers[addr] = d.depth + + ve = ve.Elem() + if ve.Kind() == reflect.Interface { + if ve.IsNil() { + nilFound = true + break + } + ve = ve.Elem() + } + } + + // Display type information. + d.w.Write(openParenBytes) + d.w.Write(bytes.Repeat(asteriskBytes, indirects)) + d.w.Write([]byte(ve.Type().String())) + d.w.Write(closeParenBytes) + + // Display pointer information. + if len(pointerChain) > 0 { + d.w.Write(openParenBytes) + for i, addr := range pointerChain { + if i > 0 { + d.w.Write(pointerChainBytes) + } + printHexPtr(d.w, addr) + } + d.w.Write(closeParenBytes) + } + + // Display dereferenced value. + d.w.Write(openParenBytes) + switch { + case nilFound == true: + d.w.Write(nilAngleBytes) + + case cycleFound == true: + d.w.Write(circularBytes) + + default: + d.ignoreNextType = true + d.dump(ve) + } + d.w.Write(closeParenBytes) +} + +// dumpSlice handles formatting of arrays and slices. Byte (uint8 under +// reflection) arrays and slices are dumped in hexdump -C fashion. +func (d *dumpState) dumpSlice(v reflect.Value) { + // Determine whether this type should be hex dumped or not. Also, + // for types which should be hexdumped, try to use the underlying data + // first, then fall back to trying to convert them to a uint8 slice. + var buf []uint8 + doConvert := false + doHexDump := false + numEntries := v.Len() + if numEntries > 0 { + vt := v.Index(0).Type() + vts := vt.String() + switch { + // C types that need to be converted. + case cCharRE.MatchString(vts): + fallthrough + case cUnsignedCharRE.MatchString(vts): + fallthrough + case cUint8tCharRE.MatchString(vts): + doConvert = true + + // Try to use existing uint8 slices and fall back to converting + // and copying if that fails. + case vt.Kind() == reflect.Uint8: + // We need an addressable interface to convert the type back + // into a byte slice. However, the reflect package won't give + // us an interface on certain things like unexported struct + // fields in order to enforce visibility rules. We use unsafe + // to bypass these restrictions since this package does not + // mutate the values. + vs := v + if !vs.CanInterface() || !vs.CanAddr() { + vs = unsafeReflectValue(vs) + } + vs = vs.Slice(0, numEntries) + + // Use the existing uint8 slice if it can be type + // asserted. + iface := vs.Interface() + if slice, ok := iface.([]uint8); ok { + buf = slice + doHexDump = true + break + } + + // The underlying data needs to be converted if it can't + // be type asserted to a uint8 slice. + doConvert = true + } + + // Copy and convert the underlying type if needed. + if doConvert && vt.ConvertibleTo(uint8Type) { + // Convert and copy each element into a uint8 byte + // slice. + buf = make([]uint8, numEntries) + for i := 0; i < numEntries; i++ { + vv := v.Index(i) + buf[i] = uint8(vv.Convert(uint8Type).Uint()) + } + doHexDump = true + } + } + + // Hexdump the entire slice as needed. + if doHexDump { + indent := strings.Repeat(d.cs.Indent, d.depth) + str := indent + hex.Dump(buf) + str = strings.Replace(str, "\n", "\n"+indent, -1) + str = strings.TrimRight(str, d.cs.Indent) + d.w.Write([]byte(str)) + return + } + + // Recursively call dump for each item. 
+ for i := 0; i < numEntries; i++ { + d.dump(d.unpackValue(v.Index(i))) + if i < (numEntries - 1) { + d.w.Write(commaNewlineBytes) + } else { + d.w.Write(newlineBytes) + } + } +} + +// dump is the main workhorse for dumping a value. It uses the passed reflect +// value to figure out what kind of object we are dealing with and formats it +// appropriately. It is a recursive function, however circular data structures +// are detected and handled properly. +func (d *dumpState) dump(v reflect.Value) { + // Handle invalid reflect values immediately. + kind := v.Kind() + if kind == reflect.Invalid { + d.w.Write(invalidAngleBytes) + return + } + + // Handle pointers specially. + if kind == reflect.Ptr { + d.indent() + d.dumpPtr(v) + return + } + + // Print type information unless already handled elsewhere. + if !d.ignoreNextType { + d.indent() + d.w.Write(openParenBytes) + d.w.Write([]byte(v.Type().String())) + d.w.Write(closeParenBytes) + d.w.Write(spaceBytes) + } + d.ignoreNextType = false + + // Call Stringer/error interfaces if they exist and the handle methods flag + // is enabled + if !d.cs.DisableMethods { + if (kind != reflect.Invalid) && (kind != reflect.Interface) { + if handled := handleMethods(d.cs, d.w, v); handled { + return + } + } + } + + switch kind { + case reflect.Invalid: + // Do nothing. We should never get here since invalid has already + // been handled above. + + case reflect.Bool: + printBool(d.w, v.Bool()) + + case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: + printInt(d.w, v.Int(), 10) + + case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: + printUint(d.w, v.Uint(), 10) + + case reflect.Float32: + printFloat(d.w, v.Float(), 32) + + case reflect.Float64: + printFloat(d.w, v.Float(), 64) + + case reflect.Complex64: + printComplex(d.w, v.Complex(), 32) + + case reflect.Complex128: + printComplex(d.w, v.Complex(), 64) + + case reflect.Slice: + if v.IsNil() { + d.w.Write(nilAngleBytes) + break + } + fallthrough + + case reflect.Array: + d.w.Write(openBraceNewlineBytes) + d.depth++ + if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { + d.indent() + d.w.Write(maxNewlineBytes) + } else { + d.dumpSlice(v) + } + d.depth-- + d.indent() + d.w.Write(closeBraceBytes) + + case reflect.String: + d.w.Write([]byte(strconv.Quote(v.String()))) + + case reflect.Interface: + // The only time we should get here is for nil interfaces due to + // unpackValue calls. + if v.IsNil() { + d.w.Write(nilAngleBytes) + } + + case reflect.Ptr: + // Do nothing. We should never get here since pointers have already + // been handled above. 
+ + case reflect.Map: + d.w.Write(openBraceNewlineBytes) + d.depth++ + if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { + d.indent() + d.w.Write(maxNewlineBytes) + } else { + numEntries := v.Len() + keys := v.MapKeys() + if d.cs.SortKeys { + sortValues(keys) + } + for i, key := range keys { + d.dump(d.unpackValue(key)) + d.w.Write(colonSpaceBytes) + d.ignoreNextIndent = true + d.dump(d.unpackValue(v.MapIndex(key))) + if i < (numEntries - 1) { + d.w.Write(commaNewlineBytes) + } else { + d.w.Write(newlineBytes) + } + } + } + d.depth-- + d.indent() + d.w.Write(closeBraceBytes) + + case reflect.Struct: + d.w.Write(openBraceNewlineBytes) + d.depth++ + if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { + d.indent() + d.w.Write(maxNewlineBytes) + } else { + vt := v.Type() + numFields := v.NumField() + for i := 0; i < numFields; i++ { + d.indent() + vtf := vt.Field(i) + d.w.Write([]byte(vtf.Name)) + d.w.Write(colonSpaceBytes) + d.ignoreNextIndent = true + d.dump(d.unpackValue(v.Field(i))) + if i < (numFields - 1) { + d.w.Write(commaNewlineBytes) + } else { + d.w.Write(newlineBytes) + } + } + } + d.depth-- + d.indent() + d.w.Write(closeBraceBytes) + + case reflect.Uintptr: + printHexPtr(d.w, uintptr(v.Uint())) + + case reflect.UnsafePointer, reflect.Chan, reflect.Func: + printHexPtr(d.w, v.Pointer()) + + // There were not any other types at the time this code was written, but + // fall back to letting the default fmt package handle it in case any new + // types are added. + default: + if v.CanInterface() { + fmt.Fprintf(d.w, "%v", v.Interface()) + } else { + fmt.Fprintf(d.w, "%v", v.String()) + } + } +} + +// fdump is a helper function to consolidate the logic from the various public +// methods which take varying writers and config states. +func fdump(cs *ConfigState, w io.Writer, a ...interface{}) { + for _, arg := range a { + if arg == nil { + w.Write(interfaceBytes) + w.Write(spaceBytes) + w.Write(nilAngleBytes) + w.Write(newlineBytes) + continue + } + + d := dumpState{w: w, cs: cs} + d.pointers = make(map[uintptr]int) + d.dump(reflect.ValueOf(arg)) + d.w.Write(newlineBytes) + } +} + +// Fdump formats and displays the passed arguments to io.Writer w. It formats +// exactly the same as Dump. +func Fdump(w io.Writer, a ...interface{}) { + fdump(&Config, w, a...) +} + +// Sdump returns a string with the passed arguments formatted exactly the same +// as Dump. +func Sdump(a ...interface{}) string { + var buf bytes.Buffer + fdump(&Config, &buf, a...) + return buf.String() +} + +/* +Dump displays the passed parameters to standard out with newlines, customizable +indentation, and additional debug information such as complete types and all +pointer addresses used to indirect to the final value. It provides the +following features over the built-in printing facilities provided by the fmt +package: + + * Pointers are dereferenced and followed + * Circular data structures are detected and handled properly + * Custom Stringer/error interfaces are optionally invoked, including + on unexported types + * Custom types which only implement the Stringer/error interfaces via + a pointer receiver are optionally invoked when passing non-pointer + variables + * Byte arrays and slices are dumped like the hexdump -C command which + includes offsets, byte values in hex, and ASCII output + +The configuration options are controlled by an exported package global, +spew.Config. See ConfigState for options documentation. 
+ +See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to +get the formatted result as a string. +*/ +func Dump(a ...interface{}) { + fdump(&Config, os.Stdout, a...) +} diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go new file mode 100644 index 0000000..b6b1fb0 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/format.go @@ -0,0 +1,413 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +package spew + +import ( + "bytes" + "fmt" + "reflect" + "strconv" + "strings" +) + +// supportedFlags is a list of all the character flags supported by fmt package. +const supportedFlags = "0-+# " + +// formatState implements the fmt.Formatter interface and contains information +// about the state of a formatting operation. The NewFormatter function can +// be used to get a new Formatter which can be used directly as arguments +// in standard fmt package printing calls. +type formatState struct { + value interface{} + fs fmt.State + depth int + pointers map[uintptr]int + ignoreNextType bool + cs *ConfigState +} + +// buildDefaultFormat recreates the original format string without precision +// and width information to pass in to fmt.Sprintf in the case of an +// unrecognized type. Unless new types are added to the language, this +// function won't ever be called. +func (f *formatState) buildDefaultFormat() (format string) { + buf := bytes.NewBuffer(percentBytes) + + for _, flag := range supportedFlags { + if f.fs.Flag(int(flag)) { + buf.WriteRune(flag) + } + } + + buf.WriteRune('v') + + format = buf.String() + return format +} + +// constructOrigFormat recreates the original format string including precision +// and width information to pass along to the standard fmt package. This allows +// automatic deferral of all format strings this package doesn't support. +func (f *formatState) constructOrigFormat(verb rune) (format string) { + buf := bytes.NewBuffer(percentBytes) + + for _, flag := range supportedFlags { + if f.fs.Flag(int(flag)) { + buf.WriteRune(flag) + } + } + + if width, ok := f.fs.Width(); ok { + buf.WriteString(strconv.Itoa(width)) + } + + if precision, ok := f.fs.Precision(); ok { + buf.Write(precisionBytes) + buf.WriteString(strconv.Itoa(precision)) + } + + buf.WriteRune(verb) + + format = buf.String() + return format +} + +// unpackValue returns values inside of non-nil interfaces when possible and +// ensures that types for values which have been unpacked from an interface +// are displayed when the show types flag is also set. +// This is useful for data types like structs, arrays, slices, and maps which +// can contain varying types packed inside an interface. 
+func (f *formatState) unpackValue(v reflect.Value) reflect.Value { + if v.Kind() == reflect.Interface { + f.ignoreNextType = false + if !v.IsNil() { + v = v.Elem() + } + } + return v +} + +// formatPtr handles formatting of pointers by indirecting them as necessary. +func (f *formatState) formatPtr(v reflect.Value) { + // Display nil if top level pointer is nil. + showTypes := f.fs.Flag('#') + if v.IsNil() && (!showTypes || f.ignoreNextType) { + f.fs.Write(nilAngleBytes) + return + } + + // Remove pointers at or below the current depth from map used to detect + // circular refs. + for k, depth := range f.pointers { + if depth >= f.depth { + delete(f.pointers, k) + } + } + + // Keep list of all dereferenced pointers to possibly show later. + pointerChain := make([]uintptr, 0) + + // Figure out how many levels of indirection there are by derferencing + // pointers and unpacking interfaces down the chain while detecting circular + // references. + nilFound := false + cycleFound := false + indirects := 0 + ve := v + for ve.Kind() == reflect.Ptr { + if ve.IsNil() { + nilFound = true + break + } + indirects++ + addr := ve.Pointer() + pointerChain = append(pointerChain, addr) + if pd, ok := f.pointers[addr]; ok && pd < f.depth { + cycleFound = true + indirects-- + break + } + f.pointers[addr] = f.depth + + ve = ve.Elem() + if ve.Kind() == reflect.Interface { + if ve.IsNil() { + nilFound = true + break + } + ve = ve.Elem() + } + } + + // Display type or indirection level depending on flags. + if showTypes && !f.ignoreNextType { + f.fs.Write(openParenBytes) + f.fs.Write(bytes.Repeat(asteriskBytes, indirects)) + f.fs.Write([]byte(ve.Type().String())) + f.fs.Write(closeParenBytes) + } else { + if nilFound || cycleFound { + indirects += strings.Count(ve.Type().String(), "*") + } + f.fs.Write(openAngleBytes) + f.fs.Write([]byte(strings.Repeat("*", indirects))) + f.fs.Write(closeAngleBytes) + } + + // Display pointer information depending on flags. + if f.fs.Flag('+') && (len(pointerChain) > 0) { + f.fs.Write(openParenBytes) + for i, addr := range pointerChain { + if i > 0 { + f.fs.Write(pointerChainBytes) + } + printHexPtr(f.fs, addr) + } + f.fs.Write(closeParenBytes) + } + + // Display dereferenced value. + switch { + case nilFound == true: + f.fs.Write(nilAngleBytes) + + case cycleFound == true: + f.fs.Write(circularShortBytes) + + default: + f.ignoreNextType = true + f.format(ve) + } +} + +// format is the main workhorse for providing the Formatter interface. It +// uses the passed reflect value to figure out what kind of object we are +// dealing with and formats it appropriately. It is a recursive function, +// however circular data structures are detected and handled properly. +func (f *formatState) format(v reflect.Value) { + // Handle invalid reflect values immediately. + kind := v.Kind() + if kind == reflect.Invalid { + f.fs.Write(invalidAngleBytes) + return + } + + // Handle pointers specially. + if kind == reflect.Ptr { + f.formatPtr(v) + return + } + + // Print type information unless already handled elsewhere. + if !f.ignoreNextType && f.fs.Flag('#') { + f.fs.Write(openParenBytes) + f.fs.Write([]byte(v.Type().String())) + f.fs.Write(closeParenBytes) + } + f.ignoreNextType = false + + // Call Stringer/error interfaces if they exist and the handle methods + // flag is enabled. 
+ if !f.cs.DisableMethods { + if (kind != reflect.Invalid) && (kind != reflect.Interface) { + if handled := handleMethods(f.cs, f.fs, v); handled { + return + } + } + } + + switch kind { + case reflect.Invalid: + // Do nothing. We should never get here since invalid has already + // been handled above. + + case reflect.Bool: + printBool(f.fs, v.Bool()) + + case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: + printInt(f.fs, v.Int(), 10) + + case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: + printUint(f.fs, v.Uint(), 10) + + case reflect.Float32: + printFloat(f.fs, v.Float(), 32) + + case reflect.Float64: + printFloat(f.fs, v.Float(), 64) + + case reflect.Complex64: + printComplex(f.fs, v.Complex(), 32) + + case reflect.Complex128: + printComplex(f.fs, v.Complex(), 64) + + case reflect.Slice: + if v.IsNil() { + f.fs.Write(nilAngleBytes) + break + } + fallthrough + + case reflect.Array: + f.fs.Write(openBracketBytes) + f.depth++ + if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { + f.fs.Write(maxShortBytes) + } else { + numEntries := v.Len() + for i := 0; i < numEntries; i++ { + if i > 0 { + f.fs.Write(spaceBytes) + } + f.ignoreNextType = true + f.format(f.unpackValue(v.Index(i))) + } + } + f.depth-- + f.fs.Write(closeBracketBytes) + + case reflect.String: + f.fs.Write([]byte(v.String())) + + case reflect.Interface: + // The only time we should get here is for nil interfaces due to + // unpackValue calls. + if v.IsNil() { + f.fs.Write(nilAngleBytes) + } + + case reflect.Ptr: + // Do nothing. We should never get here since pointers have already + // been handled above. + + case reflect.Map: + f.fs.Write(openMapBytes) + f.depth++ + if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { + f.fs.Write(maxShortBytes) + } else { + keys := v.MapKeys() + if f.cs.SortKeys { + sortValues(keys) + } + for i, key := range keys { + if i > 0 { + f.fs.Write(spaceBytes) + } + f.ignoreNextType = true + f.format(f.unpackValue(key)) + f.fs.Write(colonBytes) + f.ignoreNextType = true + f.format(f.unpackValue(v.MapIndex(key))) + } + } + f.depth-- + f.fs.Write(closeMapBytes) + + case reflect.Struct: + numFields := v.NumField() + f.fs.Write(openBraceBytes) + f.depth++ + if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { + f.fs.Write(maxShortBytes) + } else { + vt := v.Type() + for i := 0; i < numFields; i++ { + if i > 0 { + f.fs.Write(spaceBytes) + } + vtf := vt.Field(i) + if f.fs.Flag('+') || f.fs.Flag('#') { + f.fs.Write([]byte(vtf.Name)) + f.fs.Write(colonBytes) + } + f.format(f.unpackValue(v.Field(i))) + } + } + f.depth-- + f.fs.Write(closeBraceBytes) + + case reflect.Uintptr: + printHexPtr(f.fs, uintptr(v.Uint())) + + case reflect.UnsafePointer, reflect.Chan, reflect.Func: + printHexPtr(f.fs, v.Pointer()) + + // There were not any other types at the time this code was written, but + // fall back to letting the default fmt package handle it if any get added. + default: + format := f.buildDefaultFormat() + if v.CanInterface() { + fmt.Fprintf(f.fs, format, v.Interface()) + } else { + fmt.Fprintf(f.fs, format, v.String()) + } + } +} + +// Format satisfies the fmt.Formatter interface. See NewFormatter for usage +// details. +func (f *formatState) Format(fs fmt.State, verb rune) { + f.fs = fs + + // Use standard formatting for verbs that are not v. 
+ if verb != 'v' { + format := f.constructOrigFormat(verb) + fmt.Fprintf(fs, format, f.value) + return + } + + if f.value == nil { + if fs.Flag('#') { + fs.Write(interfaceBytes) + } + fs.Write(nilAngleBytes) + return + } + + f.format(reflect.ValueOf(f.value)) +} + +// newFormatter is a helper function to consolidate the logic from the various +// public methods which take varying config states. +func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter { + fs := &formatState{value: v, cs: cs} + fs.pointers = make(map[uintptr]int) + return fs +} + +/* +NewFormatter returns a custom formatter that satisfies the fmt.Formatter +interface. As a result, it integrates cleanly with standard fmt package +printing functions. The formatter is useful for inline printing of smaller data +types similar to the standard %v format specifier. + +The custom formatter only responds to the %v (most compact), %+v (adds pointer +addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb +combinations. Any other verbs such as %x and %q will be sent to the the +standard fmt package for formatting. In addition, the custom formatter ignores +the width and precision arguments (however they will still work on the format +specifiers not handled by the custom formatter). + +Typically this function shouldn't be called directly. It is much easier to make +use of the custom formatter by calling one of the convenience functions such as +Printf, Println, or Fprintf. +*/ +func NewFormatter(v interface{}) fmt.Formatter { + return newFormatter(&Config, v) +} diff --git a/vendor/github.com/davecgh/go-spew/spew/spew.go b/vendor/github.com/davecgh/go-spew/spew/spew.go new file mode 100644 index 0000000..d8233f5 --- /dev/null +++ b/vendor/github.com/davecgh/go-spew/spew/spew.go @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2013 Dave Collins + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +package spew + +import ( + "fmt" + "io" +) + +// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the formatted string as a value that satisfies error. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b)) +func Errorf(format string, a ...interface{}) (err error) { + return fmt.Errorf(format, convertArgs(a)...) +} + +// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. 
+// +// This function is shorthand for the following syntax: +// +// fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b)) +func Fprint(w io.Writer, a ...interface{}) (n int, err error) { + return fmt.Fprint(w, convertArgs(a)...) +} + +// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b)) +func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { + return fmt.Fprintf(w, format, convertArgs(a)...) +} + +// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it +// passed with a default Formatter interface returned by NewFormatter. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b)) +func Fprintln(w io.Writer, a ...interface{}) (n int, err error) { + return fmt.Fprintln(w, convertArgs(a)...) +} + +// Print is a wrapper for fmt.Print that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b)) +func Print(a ...interface{}) (n int, err error) { + return fmt.Print(convertArgs(a)...) +} + +// Printf is a wrapper for fmt.Printf that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b)) +func Printf(format string, a ...interface{}) (n int, err error) { + return fmt.Printf(format, convertArgs(a)...) +} + +// Println is a wrapper for fmt.Println that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the number of bytes written and any write error encountered. See +// NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b)) +func Println(a ...interface{}) (n int, err error) { + return fmt.Println(convertArgs(a)...) +} + +// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the resulting string. See NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b)) +func Sprint(a ...interface{}) string { + return fmt.Sprint(convertArgs(a)...) +} + +// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were +// passed with a default Formatter interface returned by NewFormatter. It +// returns the resulting string. See NewFormatter for formatting details. 
+// +// This function is shorthand for the following syntax: +// +// fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b)) +func Sprintf(format string, a ...interface{}) string { + return fmt.Sprintf(format, convertArgs(a)...) +} + +// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it +// were passed with a default Formatter interface returned by NewFormatter. It +// returns the resulting string. See NewFormatter for formatting details. +// +// This function is shorthand for the following syntax: +// +// fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b)) +func Sprintln(a ...interface{}) string { + return fmt.Sprintln(convertArgs(a)...) +} + +// convertArgs accepts a slice of arguments and returns a slice of the same +// length with each argument converted to a default spew Formatter interface. +func convertArgs(args []interface{}) (formatters []interface{}) { + formatters = make([]interface{}, len(args)) + for index, arg := range args { + formatters[index] = NewFormatter(arg) + } + return formatters +} diff --git a/vendor/github.com/joho/godotenv/.gitignore b/vendor/github.com/joho/godotenv/.gitignore new file mode 100644 index 0000000..e43b0f9 --- /dev/null +++ b/vendor/github.com/joho/godotenv/.gitignore @@ -0,0 +1 @@ +.DS_Store diff --git a/vendor/github.com/joho/godotenv/LICENCE b/vendor/github.com/joho/godotenv/LICENCE new file mode 100644 index 0000000..e7ddd51 --- /dev/null +++ b/vendor/github.com/joho/godotenv/LICENCE @@ -0,0 +1,23 @@ +Copyright (c) 2013 John Barton + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/github.com/joho/godotenv/README.md b/vendor/github.com/joho/godotenv/README.md new file mode 100644 index 0000000..05c47e6 --- /dev/null +++ b/vendor/github.com/joho/godotenv/README.md @@ -0,0 +1,127 @@ +# GoDotEnv [![wercker status](https://app.wercker.com/status/507594c2ec7e60f19403a568dfea0f78 "wercker status")](https://app.wercker.com/project/bykey/507594c2ec7e60f19403a568dfea0f78) + +A Go (golang) port of the Ruby dotenv project (which loads env vars from a .env file) + +From the original Library: + +> Storing configuration in the environment is one of the tenets of a twelve-factor app. Anything that is likely to change between deployment environments–such as resource handles for databases or credentials for external services–should be extracted from the code into environment variables. 
+> +> But it is not always practical to set environment variables on development machines or continuous integration servers where multiple projects are run. Dotenv load variables from a .env file into ENV when the environment is bootstrapped. + +It can be used as a library (for loading in env for your own daemons etc) or as a bin command. + +There is test coverage and CI for both linuxish and windows environments, but I make no guarantees about the bin version working on windows. + +## Installation + +As a library + +```shell +go get github.com/joho/godotenv +``` + +or if you want to use it as a bin command +```shell +go get github.com/joho/godotenv/cmd/godotenv +``` + +## Usage + +Add your application configuration to your `.env` file in the root of your project: + +```shell +S3_BUCKET=YOURS3BUCKET +SECRET_KEY=YOURSECRETKEYGOESHERE +``` + +Then in your Go app you can do something like + +```go +package main + +import ( + "github.com/joho/godotenv" + "log" + "os" +) + +func main() { + err := godotenv.Load() + if err != nil { + log.Fatal("Error loading .env file") + } + + s3Bucket := os.Getenv("S3_BUCKET") + secretKey := os.Getenv("SECRET_KEY") + + // now do something with s3 or whatever +} +``` + +If you're even lazier than that, you can just take advantage of the autoload package which will read in `.env` on import + +```go +import _ "github.com/joho/godotenv/autoload" +``` + +While `.env` in the project root is the default, you don't have to be constrained, both examples below are 100% legit + +```go +_ = godotenv.Load("somerandomfile") +_ = godotenv.Load("filenumberone.env", "filenumbertwo.env") +``` + +If you want to be really fancy with your env file you can do comments and exports (below is a valid env file) + +```shell +# I am a comment and that is OK +SOME_VAR=someval +FOO=BAR # comments at line end are OK too +export BAR=BAZ +``` + +Or finally you can do YAML(ish) style + +```yaml +FOO: bar +BAR: baz +``` + +as a final aside, if you don't want godotenv munging your env you can just get a map back instead + +```go +var myEnv map[string]string +myEnv, err := godotenv.Read() + +s3Bucket := myEnv["S3_BUCKET"] +``` + +### Command Mode + +Assuming you've installed the command as above and you've got `$GOPATH/bin` in your `$PATH` + +``` +godotenv -f /some/path/to/.env some_command with some args +``` + +If you don't specify `-f` it will fall back on the default of loading `.env` in `PWD` + +## Contributing + +Contributions are most welcome! The parser itself is pretty stupidly naive and I wouldn't be surprised if it breaks with edge cases. + +*code changes without tests will not be accepted* + +1. Fork it +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Commit your changes (`git commit -am 'Added some feature'`) +4. Push to the branch (`git push origin my-new-feature`) +5. Create new Pull Request + +## CI + +Linux: [![wercker status](https://app.wercker.com/status/507594c2ec7e60f19403a568dfea0f78/m "wercker status")](https://app.wercker.com/project/bykey/507594c2ec7e60f19403a568dfea0f78) Windows: [![Build status](https://ci.appveyor.com/api/projects/status/9v40vnfvvgde64u4)](https://ci.appveyor.com/project/joho/godotenv) + +## Who? + +The original library [dotenv](https://github.com/bkeepers/dotenv) was written by [Brandon Keepers](http://opensoul.org/), and this port was done by [John Barton](http://whoisjohnbarton.com) based off the tests/fixtures in the original library. 
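The command-mode invocation shown above maps directly onto the library's `Exec` helper (defined in `godotenv.go` further down in this diff: `Exec(filenames []string, cmd string, cmdArgs []string) error`). A minimal sketch of the same idea from Go code, reusing the placeholder path and command from the README example:

```go
package main

import (
	"log"

	"github.com/joho/godotenv"
)

func main() {
	// Roughly what `godotenv -f /some/path/to/.env some_command with some args` does:
	// load the listed .env files into the environment, then run the command with
	// the current process's stdin/stdout/stderr attached.
	err := godotenv.Exec(
		[]string{"/some/path/to/.env"},   // env files to load first
		"some_command",                   // command to run (placeholder from the README)
		[]string{"with", "some", "args"}, // its arguments
	)
	if err != nil {
		log.Fatal(err)
	}
}
```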
diff --git a/vendor/github.com/joho/godotenv/autoload/autoload.go b/vendor/github.com/joho/godotenv/autoload/autoload.go new file mode 100644 index 0000000..fbcd2bd --- /dev/null +++ b/vendor/github.com/joho/godotenv/autoload/autoload.go @@ -0,0 +1,15 @@ +package autoload + +/* + You can just read the .env file on import just by doing + + import _ "github.com/joho/godotenv/autoload" + + And bob's your mother's brother +*/ + +import "github.com/joho/godotenv" + +func init() { + godotenv.Load() +} diff --git a/vendor/github.com/joho/godotenv/cmd/godotenv/cmd.go b/vendor/github.com/joho/godotenv/cmd/godotenv/cmd.go new file mode 100644 index 0000000..04a9f64 --- /dev/null +++ b/vendor/github.com/joho/godotenv/cmd/godotenv/cmd.go @@ -0,0 +1,54 @@ +package main + +import ( + "flag" + "fmt" + "log" + + "strings" + + "github.com/joho/godotenv" +) + +func main() { + var showHelp bool + flag.BoolVar(&showHelp, "h", false, "show help") + var rawEnvFilenames string + flag.StringVar(&rawEnvFilenames, "f", "", "comma separated paths to .env files") + + flag.Parse() + + usage := ` +Run a process with a env setup from a .env file + +godotenv [-f ENV_FILE_PATHS] COMMAND_ARGS + +ENV_FILE_PATHS: comma separated paths to .env files +COMMAND_ARGS: command and args you want to run + +example + godotenv -f /path/to/something/.env,/another/path/.env fortune +` + // if no args or -h flag + // print usage and return + args := flag.Args() + if showHelp || len(args) == 0 { + fmt.Println(usage) + return + } + + // load env + var envFilenames []string + if rawEnvFilenames != "" { + envFilenames = strings.Split(rawEnvFilenames, ",") + } + + // take rest of args and "exec" them + cmd := args[0] + cmdArgs := args[1:] + + err := godotenv.Exec(envFilenames, cmd, cmdArgs) + if err != nil { + log.Fatal(err) + } +} diff --git a/vendor/github.com/joho/godotenv/fixtures/equals.env b/vendor/github.com/joho/godotenv/fixtures/equals.env new file mode 100644 index 0000000..594c532 --- /dev/null +++ b/vendor/github.com/joho/godotenv/fixtures/equals.env @@ -0,0 +1,2 @@ +export OPTION_A='postgres://localhost:5432/database?sslmode=disable' + diff --git a/vendor/github.com/joho/godotenv/fixtures/exported.env b/vendor/github.com/joho/godotenv/fixtures/exported.env new file mode 100644 index 0000000..5821377 --- /dev/null +++ b/vendor/github.com/joho/godotenv/fixtures/exported.env @@ -0,0 +1,2 @@ +export OPTION_A=2 +export OPTION_B='\n' diff --git a/vendor/github.com/joho/godotenv/fixtures/plain.env b/vendor/github.com/joho/godotenv/fixtures/plain.env new file mode 100644 index 0000000..c983b06 --- /dev/null +++ b/vendor/github.com/joho/godotenv/fixtures/plain.env @@ -0,0 +1,5 @@ +OPTION_A=1 +OPTION_B=2 +OPTION_C= 3 +OPTION_D =4 +OPTION_E = 5 diff --git a/vendor/github.com/joho/godotenv/fixtures/quoted.env b/vendor/github.com/joho/godotenv/fixtures/quoted.env new file mode 100644 index 0000000..a03ce24 --- /dev/null +++ b/vendor/github.com/joho/godotenv/fixtures/quoted.env @@ -0,0 +1,8 @@ +OPTION_A='1' +OPTION_B='2' +OPTION_C='' +OPTION_D='\n' +OPTION_E="1" +OPTION_F="2" +OPTION_G="" +OPTION_H="\n" diff --git a/vendor/github.com/joho/godotenv/godotenv.go b/vendor/github.com/joho/godotenv/godotenv.go new file mode 100644 index 0000000..94b2676 --- /dev/null +++ b/vendor/github.com/joho/godotenv/godotenv.go @@ -0,0 +1,229 @@ +// Package godotenv is a go port of the ruby dotenv library (https://github.com/bkeepers/dotenv) +// +// Examples/readme can be found on the github page at https://github.com/joho/godotenv +// +// The TL;DR is that you 
make a .env file that looks something like +// +// SOME_ENV_VAR=somevalue +// +// and then in your go code you can call +// +// godotenv.Load() +// +// and all the env vars declared in .env will be avaiable through os.Getenv("SOME_ENV_VAR") +package godotenv + +import ( + "bufio" + "errors" + "os" + "os/exec" + "strings" +) + +// Load will read your env file(s) and load them into ENV for this process. +// +// Call this function as close as possible to the start of your program (ideally in main) +// +// If you call Load without any args it will default to loading .env in the current path +// +// You can otherwise tell it which files to load (there can be more than one) like +// +// godotenv.Load("fileone", "filetwo") +// +// It's important to note that it WILL NOT OVERRIDE an env variable that already exists - consider the .env file to set dev vars or sensible defaults +func Load(filenames ...string) (err error) { + filenames = filenamesOrDefault(filenames) + + for _, filename := range filenames { + err = loadFile(filename, false) + if err != nil { + return // return early on a spazout + } + } + return +} + +// Overload will read your env file(s) and load them into ENV for this process. +// +// Call this function as close as possible to the start of your program (ideally in main) +// +// If you call Overload without any args it will default to loading .env in the current path +// +// You can otherwise tell it which files to load (there can be more than one) like +// +// godotenv.Overload("fileone", "filetwo") +// +// It's important to note this WILL OVERRIDE an env variable that already exists - consider the .env file to forcefilly set all vars. +func Overload(filenames ...string) (err error) { + filenames = filenamesOrDefault(filenames) + + for _, filename := range filenames { + err = loadFile(filename, true) + if err != nil { + return // return early on a spazout + } + } + return +} + +// Read all env (with same file loading semantics as Load) but return values as +// a map rather than automatically writing values into env +func Read(filenames ...string) (envMap map[string]string, err error) { + filenames = filenamesOrDefault(filenames) + envMap = make(map[string]string) + + for _, filename := range filenames { + individualEnvMap, individualErr := readFile(filename) + + if individualErr != nil { + err = individualErr + return // return early on a spazout + } + + for key, value := range individualEnvMap { + envMap[key] = value + } + } + + return +} + +// Exec loads env vars from the specified filenames (empty map falls back to default) +// then executes the cmd specified. +// +// Simply hooks up os.Stdin/err/out to the command and calls Run() +// +// If you want more fine grained control over your command it's recommended +// that you use `Load()` or `Read()` and the `os/exec` package yourself. +func Exec(filenames []string, cmd string, cmdArgs []string) error { + Load(filenames...) + + command := exec.Command(cmd, cmdArgs...) 
+ command.Stdin = os.Stdin + command.Stdout = os.Stdout + command.Stderr = os.Stderr + return command.Run() +} + +func filenamesOrDefault(filenames []string) []string { + if len(filenames) == 0 { + return []string{".env"} + } + return filenames +} + +func loadFile(filename string, overload bool) error { + envMap, err := readFile(filename) + if err != nil { + return err + } + + for key, value := range envMap { + if os.Getenv(key) == "" || overload { + os.Setenv(key, value) + } + } + + return nil +} + +func readFile(filename string) (envMap map[string]string, err error) { + file, err := os.Open(filename) + if err != nil { + return + } + defer file.Close() + + envMap = make(map[string]string) + + var lines []string + scanner := bufio.NewScanner(file) + for scanner.Scan() { + lines = append(lines, scanner.Text()) + } + + for _, fullLine := range lines { + if !isIgnoredLine(fullLine) { + key, value, err := parseLine(fullLine) + + if err == nil { + envMap[key] = value + } + } + } + return +} + +func parseLine(line string) (key string, value string, err error) { + if len(line) == 0 { + err = errors.New("zero length string") + return + } + + // ditch the comments (but keep quoted hashes) + if strings.Contains(line, "#") { + segmentsBetweenHashes := strings.Split(line, "#") + quotesAreOpen := false + var segmentsToKeep []string + for _, segment := range segmentsBetweenHashes { + if strings.Count(segment, "\"") == 1 || strings.Count(segment, "'") == 1 { + if quotesAreOpen { + quotesAreOpen = false + segmentsToKeep = append(segmentsToKeep, segment) + } else { + quotesAreOpen = true + } + } + + if len(segmentsToKeep) == 0 || quotesAreOpen { + segmentsToKeep = append(segmentsToKeep, segment) + } + } + + line = strings.Join(segmentsToKeep, "#") + } + + // now split key from value + splitString := strings.SplitN(line, "=", 2) + + if len(splitString) != 2 { + // try yaml mode! + splitString = strings.SplitN(line, ":", 2) + } + + if len(splitString) != 2 { + err = errors.New("Can't separate key from value") + return + } + + // Parse the key + key = splitString[0] + if strings.HasPrefix(key, "export") { + key = strings.TrimPrefix(key, "export") + } + key = strings.Trim(key, " ") + + // Parse the value + value = splitString[1] + // trim + value = strings.Trim(value, " ") + + // check if we've got quoted values + if strings.Count(value, "\"") == 2 || strings.Count(value, "'") == 2 { + // pull the quotes off the edges + value = strings.Trim(value, "\"'") + + // expand quotes + value = strings.Replace(value, "\\\"", "\"", -1) + // expand newlines + value = strings.Replace(value, "\\n", "\n", -1) + } + + return +} + +func isIgnoredLine(line string) bool { + trimmedLine := strings.Trim(line, " \n\t") + return len(trimmedLine) == 0 || strings.HasPrefix(trimmedLine, "#") +} diff --git a/vendor/github.com/joho/godotenv/wercker.yml b/vendor/github.com/joho/godotenv/wercker.yml new file mode 100644 index 0000000..c716ac9 --- /dev/null +++ b/vendor/github.com/joho/godotenv/wercker.yml @@ -0,0 +1 @@ +box: pjvds/golang diff --git a/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/pmezard/go-difflib/LICENSE new file mode 100644 index 0000000..c67dad6 --- /dev/null +++ b/vendor/github.com/pmezard/go-difflib/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Patrick Mezard +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + The names of its contributors may not be used to endorse or promote +products derived from this software without specific prior written +permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go new file mode 100644 index 0000000..003e99f --- /dev/null +++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go @@ -0,0 +1,772 @@ +// Package difflib is a partial port of Python difflib module. +// +// It provides tools to compare sequences of strings and generate textual diffs. +// +// The following class and functions have been ported: +// +// - SequenceMatcher +// +// - unified_diff +// +// - context_diff +// +// Getting unified diffs was the main goal of the port. Keep in mind this code +// is mostly suitable to output text differences in a human friendly way, there +// are no guarantees generated diffs are consumable by patch(1). +package difflib + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strings" +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func calculateRatio(matches, length int) float64 { + if length > 0 { + return 2.0 * float64(matches) / float64(length) + } + return 1.0 +} + +type Match struct { + A int + B int + Size int +} + +type OpCode struct { + Tag byte + I1 int + I2 int + J1 int + J2 int +} + +// SequenceMatcher compares sequence of strings. The basic +// algorithm predates, and is a little fancier than, an algorithm +// published in the late 1980's by Ratcliff and Obershelp under the +// hyperbolic name "gestalt pattern matching". The basic idea is to find +// the longest contiguous matching subsequence that contains no "junk" +// elements (R-O doesn't address junk). The same idea is then applied +// recursively to the pieces of the sequences to the left and to the right +// of the matching subsequence. This does not yield minimal edit +// sequences, but does tend to yield matches that "look right" to people. +// +// SequenceMatcher tries to compute a "human-friendly diff" between two +// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the +// longest *contiguous* & junk-free matching subsequence. That's what +// catches peoples' eyes. 
The Windows(tm) windiff has another interesting +// notion, pairing up elements that appear uniquely in each sequence. +// That, and the method here, appear to yield more intuitive difference +// reports than does diff. This method appears to be the least vulnerable +// to synching up on blocks of "junk lines", though (like blank lines in +// ordinary text files, or maybe "
<P>
" lines in HTML files). That may be +// because this is the only method of the 3 that has a *concept* of +// "junk" . +// +// Timing: Basic R-O is cubic time worst case and quadratic time expected +// case. SequenceMatcher is quadratic time for the worst case and has +// expected-case behavior dependent in a complicated way on how many +// elements the sequences have in common; best case time is linear. +type SequenceMatcher struct { + a []string + b []string + b2j map[string][]int + IsJunk func(string) bool + autoJunk bool + bJunk map[string]struct{} + matchingBlocks []Match + fullBCount map[string]int + bPopular map[string]struct{} + opCodes []OpCode +} + +func NewMatcher(a, b []string) *SequenceMatcher { + m := SequenceMatcher{autoJunk: true} + m.SetSeqs(a, b) + return &m +} + +func NewMatcherWithJunk(a, b []string, autoJunk bool, + isJunk func(string) bool) *SequenceMatcher { + + m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} + m.SetSeqs(a, b) + return &m +} + +// Set two sequences to be compared. +func (m *SequenceMatcher) SetSeqs(a, b []string) { + m.SetSeq1(a) + m.SetSeq2(b) +} + +// Set the first sequence to be compared. The second sequence to be compared is +// not changed. +// +// SequenceMatcher computes and caches detailed information about the second +// sequence, so if you want to compare one sequence S against many sequences, +// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other +// sequences. +// +// See also SetSeqs() and SetSeq2(). +func (m *SequenceMatcher) SetSeq1(a []string) { + if &a == &m.a { + return + } + m.a = a + m.matchingBlocks = nil + m.opCodes = nil +} + +// Set the second sequence to be compared. The first sequence to be compared is +// not changed. +func (m *SequenceMatcher) SetSeq2(b []string) { + if &b == &m.b { + return + } + m.b = b + m.matchingBlocks = nil + m.opCodes = nil + m.fullBCount = nil + m.chainB() +} + +func (m *SequenceMatcher) chainB() { + // Populate line -> index mapping + b2j := map[string][]int{} + for i, s := range m.b { + indices := b2j[s] + indices = append(indices, i) + b2j[s] = indices + } + + // Purge junk elements + m.bJunk = map[string]struct{}{} + if m.IsJunk != nil { + junk := m.bJunk + for s, _ := range b2j { + if m.IsJunk(s) { + junk[s] = struct{}{} + } + } + for s, _ := range junk { + delete(b2j, s) + } + } + + // Purge remaining popular elements + popular := map[string]struct{}{} + n := len(m.b) + if m.autoJunk && n >= 200 { + ntest := n/100 + 1 + for s, indices := range b2j { + if len(indices) > ntest { + popular[s] = struct{}{} + } + } + for s, _ := range popular { + delete(b2j, s) + } + } + m.bPopular = popular + m.b2j = b2j +} + +func (m *SequenceMatcher) isBJunk(s string) bool { + _, ok := m.bJunk[s] + return ok +} + +// Find longest matching block in a[alo:ahi] and b[blo:bhi]. +// +// If IsJunk is not defined: +// +// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where +// alo <= i <= i+k <= ahi +// blo <= j <= j+k <= bhi +// and for all (i',j',k') meeting those conditions, +// k >= k' +// i <= i' +// and if i == i', j <= j' +// +// In other words, of all maximal matching blocks, return one that +// starts earliest in a, and of all those maximal matching blocks that +// start earliest in a, return the one that starts earliest in b. +// +// If IsJunk is defined, first the longest matching block is +// determined as above, but with the additional restriction that no +// junk element appears in the block. 
Then that block is extended as +// far as possible by matching (only) junk elements on both sides. So +// the resulting block never matches on junk except as identical junk +// happens to be adjacent to an "interesting" match. +// +// If no blocks match, return (alo, blo, 0). +func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { + // CAUTION: stripping common prefix or suffix would be incorrect. + // E.g., + // ab + // acab + // Longest matching block is "ab", but if common prefix is + // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so + // strip, so ends up claiming that ab is changed to acab by + // inserting "ca" in the middle. That's minimal but unintuitive: + // "it's obvious" that someone inserted "ac" at the front. + // Windiff ends up at the same place as diff, but by pairing up + // the unique 'b's and then matching the first two 'a's. + besti, bestj, bestsize := alo, blo, 0 + + // find longest junk-free match + // during an iteration of the loop, j2len[j] = length of longest + // junk-free match ending with a[i-1] and b[j] + j2len := map[int]int{} + for i := alo; i != ahi; i++ { + // look at all instances of a[i] in b; note that because + // b2j has no junk keys, the loop is skipped if a[i] is junk + newj2len := map[int]int{} + for _, j := range m.b2j[m.a[i]] { + // a[i] matches b[j] + if j < blo { + continue + } + if j >= bhi { + break + } + k := j2len[j-1] + 1 + newj2len[j] = k + if k > bestsize { + besti, bestj, bestsize = i-k+1, j-k+1, k + } + } + j2len = newj2len + } + + // Extend the best by non-junk elements on each end. In particular, + // "popular" non-junk elements aren't in b2j, which greatly speeds + // the inner loop above, but also means "the best" match so far + // doesn't contain any junk *or* popular non-junk elements. + for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + !m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + // Now that we have a wholly interesting match (albeit possibly + // empty!), we may as well suck up the matching junk on each + // side of it too. Can't think of a good reason not to, and it + // saves post-processing the (possibly considerable) expense of + // figuring out what to do with it. In the case of an empty + // interesting match, this is clearly the right thing to do, + // because no other kind of match is possible in the regions. + for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + return Match{A: besti, B: bestj, Size: bestsize} +} + +// Return list of triples describing matching subsequences. +// +// Each triple is of the form (i, j, n), and means that +// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in +// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are +// adjacent triples in the list, and the second is not the last triple in the +// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe +// adjacent equal blocks. +// +// The last triple is a dummy, (len(a), len(b), 0), and is the only +// triple with n==0. 
+func (m *SequenceMatcher) GetMatchingBlocks() []Match { + if m.matchingBlocks != nil { + return m.matchingBlocks + } + + var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match + matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { + match := m.findLongestMatch(alo, ahi, blo, bhi) + i, j, k := match.A, match.B, match.Size + if match.Size > 0 { + if alo < i && blo < j { + matched = matchBlocks(alo, i, blo, j, matched) + } + matched = append(matched, match) + if i+k < ahi && j+k < bhi { + matched = matchBlocks(i+k, ahi, j+k, bhi, matched) + } + } + return matched + } + matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) + + // It's possible that we have adjacent equal blocks in the + // matching_blocks list now. + nonAdjacent := []Match{} + i1, j1, k1 := 0, 0, 0 + for _, b := range matched { + // Is this block adjacent to i1, j1, k1? + i2, j2, k2 := b.A, b.B, b.Size + if i1+k1 == i2 && j1+k1 == j2 { + // Yes, so collapse them -- this just increases the length of + // the first block by the length of the second, and the first + // block so lengthened remains the block to compare against. + k1 += k2 + } else { + // Not adjacent. Remember the first block (k1==0 means it's + // the dummy we started with), and make the second block the + // new block to compare against. + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + i1, j1, k1 = i2, j2, k2 + } + } + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + + nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) + m.matchingBlocks = nonAdjacent + return m.matchingBlocks +} + +// Return list of 5-tuples describing how to turn a into b. +// +// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple +// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the +// tuple preceding it, and likewise for j1 == the previous j2. +// +// The tags are characters, with these meanings: +// +// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] +// +// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. +// +// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. +// +// 'e' (equal): a[i1:i2] == b[j1:j2] +func (m *SequenceMatcher) GetOpCodes() []OpCode { + if m.opCodes != nil { + return m.opCodes + } + i, j := 0, 0 + matching := m.GetMatchingBlocks() + opCodes := make([]OpCode, 0, len(matching)) + for _, m := range matching { + // invariant: we've pumped out correct diffs to change + // a[:i] into b[:j], and the next matching block is + // a[ai:ai+size] == b[bj:bj+size]. So we need to pump + // out a diff to change a[i:ai] into b[j:bj], pump out + // the matching block, and move (i,j) beyond the match + ai, bj, size := m.A, m.B, m.Size + tag := byte(0) + if i < ai && j < bj { + tag = 'r' + } else if i < ai { + tag = 'd' + } else if j < bj { + tag = 'i' + } + if tag > 0 { + opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) + } + i, j = ai+size, bj+size + // the list of matching blocks is terminated by a + // sentinel with size 0 + if size > 0 { + opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) + } + } + m.opCodes = opCodes + return m.opCodes +} + +// Isolate change clusters by eliminating ranges with no changes. +// +// Return a generator of groups with up to n lines of context. +// Each group is in the same format as returned by GetOpCodes(). 
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { + if n < 0 { + n = 3 + } + codes := m.GetOpCodes() + if len(codes) == 0 { + codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} + } + // Fixup leading and trailing groups if they show no changes. + if codes[0].Tag == 'e' { + c := codes[0] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} + } + if codes[len(codes)-1].Tag == 'e' { + c := codes[len(codes)-1] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} + } + nn := n + n + groups := [][]OpCode{} + group := []OpCode{} + for _, c := range codes { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + // End the current group and start a new one whenever + // there is a large range with no changes. + if c.Tag == 'e' && i2-i1 > nn { + group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), + j1, min(j2, j1+n)}) + groups = append(groups, group) + group = []OpCode{} + i1, j1 = max(i1, i2-n), max(j1, j2-n) + } + group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) + } + if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { + groups = append(groups, group) + } + return groups +} + +// Return a measure of the sequences' similarity (float in [0,1]). +// +// Where T is the total number of elements in both sequences, and +// M is the number of matches, this is 2.0*M / T. +// Note that this is 1 if the sequences are identical, and 0 if +// they have nothing in common. +// +// .Ratio() is expensive to compute if you haven't already computed +// .GetMatchingBlocks() or .GetOpCodes(), in which case you may +// want to try .QuickRatio() or .RealQuickRation() first to get an +// upper bound. +func (m *SequenceMatcher) Ratio() float64 { + matches := 0 + for _, m := range m.GetMatchingBlocks() { + matches += m.Size + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() relatively quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute. +func (m *SequenceMatcher) QuickRatio() float64 { + // viewing a and b as multisets, set matches to the cardinality + // of their intersection; this counts the number of matches + // without regard to order, so is clearly an upper bound + if m.fullBCount == nil { + m.fullBCount = map[string]int{} + for _, s := range m.b { + m.fullBCount[s] = m.fullBCount[s] + 1 + } + } + + // avail[x] is the number of times x appears in 'b' less the + // number of times we've seen it in 'a' so far ... kinda + avail := map[string]int{} + matches := 0 + for _, s := range m.a { + n, ok := avail[s] + if !ok { + n = m.fullBCount[s] + } + avail[s] = n - 1 + if n > 0 { + matches += 1 + } + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() very quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute than either .Ratio() or .QuickRatio(). 
+func (m *SequenceMatcher) RealQuickRatio() float64 { + la, lb := len(m.a), len(m.b) + return calculateRatio(min(la, lb), la+lb) +} + +// Convert range to the "ed" format +func formatRangeUnified(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 1 { + return fmt.Sprintf("%d", beginning) + } + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + return fmt.Sprintf("%d,%d", beginning, length) +} + +// Unified diff parameters +type UnifiedDiff struct { + A []string // First sequence lines + FromFile string // First file name + FromDate string // First file time + B []string // Second sequence lines + ToFile string // Second file name + ToDate string // Second file time + Eol string // Headers end of line, defaults to LF + Context int // Number of context lines +} + +// Compare two sequences of lines; generate the delta as a unified diff. +// +// Unified diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by 'n' which +// defaults to three. +// +// By default, the diff control lines (those with ---, +++, or @@) are +// created with a trailing newline. This is helpful so that inputs +// created from file.readlines() result in diffs that are suitable for +// file.writelines() since both the inputs and outputs have trailing +// newlines. +// +// For inputs that do not have trailing newlines, set the lineterm +// argument to "" so that the output will be uniformly newline free. +// +// The unidiff format normally has a header for filenames and modification +// times. Any or all of these may be specified using strings for +// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. +// The modification times are normally expressed in the ISO 8601 format. 
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + wf := func(format string, args ...interface{}) error { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + return err + } + ws := func(s string) error { + _, err := buf.WriteString(s) + return err + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) + if err != nil { + return err + } + err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) + if err != nil { + return err + } + } + } + first, last := g[0], g[len(g)-1] + range1 := formatRangeUnified(first.I1, last.I2) + range2 := formatRangeUnified(first.J1, last.J2) + if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { + return err + } + for _, c := range g { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + if c.Tag == 'e' { + for _, line := range diff.A[i1:i2] { + if err := ws(" " + line); err != nil { + return err + } + } + continue + } + if c.Tag == 'r' || c.Tag == 'd' { + for _, line := range diff.A[i1:i2] { + if err := ws("-" + line); err != nil { + return err + } + } + } + if c.Tag == 'r' || c.Tag == 'i' { + for _, line := range diff.B[j1:j2] { + if err := ws("+" + line); err != nil { + return err + } + } + } + } + } + return nil +} + +// Like WriteUnifiedDiff but returns the diff a string. +func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteUnifiedDiff(w, diff) + return string(w.Bytes()), err +} + +// Convert range to the "ed" format. +func formatRangeContext(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + if length <= 1 { + return fmt.Sprintf("%d", beginning) + } + return fmt.Sprintf("%d,%d", beginning, beginning+length-1) +} + +type ContextDiff UnifiedDiff + +// Compare two sequences of lines; generate the delta as a context diff. +// +// Context diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by diff.Context +// which defaults to three. +// +// By default, the diff control lines (those with *** or ---) are +// created with a trailing newline. +// +// For inputs that do not have trailing newlines, set the diff.Eol +// argument to "" so that the output will be uniformly newline free. +// +// The context diff format normally has a header for filenames and +// modification times. Any or all of these may be specified using +// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. +// The modification times are normally expressed in the ISO 8601 format. +// If not specified, the strings default to blanks. 
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + var diffErr error + wf := func(format string, args ...interface{}) { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + if diffErr == nil && err != nil { + diffErr = err + } + } + ws := func(s string) { + _, err := buf.WriteString(s) + if diffErr == nil && err != nil { + diffErr = err + } + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + prefix := map[byte]string{ + 'i': "+ ", + 'd': "- ", + 'r': "! ", + 'e': " ", + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) + wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) + } + } + + first, last := g[0], g[len(g)-1] + ws("***************" + diff.Eol) + + range1 := formatRangeContext(first.I1, last.I2) + wf("*** %s ****%s", range1, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'd' { + for _, cc := range g { + if cc.Tag == 'i' { + continue + } + for _, line := range diff.A[cc.I1:cc.I2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + + range2 := formatRangeContext(first.J1, last.J2) + wf("--- %s ----%s", range2, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'i' { + for _, cc := range g { + if cc.Tag == 'd' { + continue + } + for _, line := range diff.B[cc.J1:cc.J2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + } + return diffErr +} + +// Like WriteContextDiff but returns the diff a string. +func GetContextDiffString(diff ContextDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteContextDiff(w, diff) + return string(w.Bytes()), err +} + +// Split a string on "\n" while preserving them. The output can be used +// as input for UnifiedDiff and ContextDiff structures. +func SplitLines(s string) []string { + lines := strings.SplitAfter(s, "\n") + lines[len(lines)-1] += "\n" + return lines +} diff --git a/vendor/github.com/stretchr/testify/LICENSE b/vendor/github.com/stretchr/testify/LICENSE new file mode 100644 index 0000000..473b670 --- /dev/null +++ b/vendor/github.com/stretchr/testify/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2012 - 2013 Mat Ryer and Tyler Bunnell + +Please consider promoting this project if you find it useful. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT +OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go new file mode 100644 index 0000000..e6a7960 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -0,0 +1,387 @@ +/* +* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen +* THIS FILE MUST NOT BE EDITED BY HAND +*/ + +package assert + +import ( + + http "net/http" + url "net/url" + time "time" +) + + +// Condition uses a Comparison to assert a complex condition. +func (a *Assertions) Condition(comp Comparison, msgAndArgs ...interface{}) bool { + return Condition(a.t, comp, msgAndArgs...) +} + + +// Contains asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// a.Contains("Hello World", "World", "But 'Hello World' does contain 'World'") +// a.Contains(["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") +// a.Contains({"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { + return Contains(a.t, s, contains, msgAndArgs...) +} + + +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// a.Empty(obj) +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { + return Empty(a.t, object, msgAndArgs...) +} + + +// Equal asserts that two objects are equal. +// +// a.Equal(123, 123, "123 and 123 should be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + return Equal(a.t, expected, actual, msgAndArgs...) +} + + +// EqualError asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// if assert.Error(t, err, "An error was expected") { +// assert.Equal(t, err, expectedError) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool { + return EqualError(a.t, theError, errString, msgAndArgs...) +} + + +// EqualValues asserts that two objects are equal or convertable to the same types +// and equal. +// +// a.EqualValues(uint32(123), int32(123), "123 and 123 should be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + return EqualValues(a.t, expected, actual, msgAndArgs...) +} + + +// Error asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if a.Error(err, "An error was expected") { +// assert.Equal(t, err, expectedError) +// } +// +// Returns whether the assertion was successful (true) or not (false). 
+func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { + return Error(a.t, err, msgAndArgs...) +} + + +// Exactly asserts that two objects are equal is value and type. +// +// a.Exactly(int32(123), int64(123), "123 and 123 should NOT be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + return Exactly(a.t, expected, actual, msgAndArgs...) +} + + +// Fail reports a failure through +func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) bool { + return Fail(a.t, failureMessage, msgAndArgs...) +} + + +// FailNow fails test +func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) bool { + return FailNow(a.t, failureMessage, msgAndArgs...) +} + + +// False asserts that the specified value is false. +// +// a.False(myBool, "myBool should be false") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { + return False(a.t, value, msgAndArgs...) +} + + +// HTTPBodyContains asserts that a specified handler returns a +// body that contains a string. +// +// a.HTTPBodyContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { + return HTTPBodyContains(a.t, handler, method, url, values, str) +} + + +// HTTPBodyNotContains asserts that a specified handler returns a +// body that does not contain a string. +// +// a.HTTPBodyNotContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { + return HTTPBodyNotContains(a.t, handler, method, url, values, str) +} + + +// HTTPError asserts that a specified handler returns an error status code. +// +// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values) bool { + return HTTPError(a.t, handler, method, url, values) +} + + +// HTTPRedirect asserts that a specified handler returns a redirect status code. +// +// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values) bool { + return HTTPRedirect(a.t, handler, method, url, values) +} + + +// HTTPSuccess asserts that a specified handler returns a success status code. +// +// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values) bool { + return HTTPSuccess(a.t, handler, method, url, values) +} + + +// Implements asserts that an object is implemented by the specified interface. 
+// +// a.Implements((*MyInterface)(nil), new(MyObject), "MyObject") +func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { + return Implements(a.t, interfaceObject, object, msgAndArgs...) +} + + +// InDelta asserts that the two numerals are within delta of each other. +// +// a.InDelta(math.Pi, (22 / 7.0), 0.01) +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + return InDelta(a.t, expected, actual, delta, msgAndArgs...) +} + + +// InDeltaSlice is the same as InDelta, except it compares two slices. +func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + return InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...) +} + + +// InEpsilon asserts that expected and actual have a relative error less than epsilon +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { + return InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...) +} + + +// InEpsilonSlice is the same as InEpsilon, except it compares two slices. +func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + return InEpsilonSlice(a.t, expected, actual, delta, msgAndArgs...) +} + + +// IsType asserts that the specified objects are of the same type. +func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { + return IsType(a.t, expectedType, object, msgAndArgs...) +} + + +// JSONEq asserts that two JSON strings are equivalent. +// +// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool { + return JSONEq(a.t, expected, actual, msgAndArgs...) +} + + +// Len asserts that the specified object has specific length. +// Len also fails if the object has a type that len() not accept. +// +// a.Len(mySlice, 3, "The size of slice is not 3") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool { + return Len(a.t, object, length, msgAndArgs...) +} + + +// Nil asserts that the specified object is nil. +// +// a.Nil(err, "err should be nothing") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { + return Nil(a.t, object, msgAndArgs...) +} + + +// NoError asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if a.NoError(err) { +// assert.Equal(t, actualObj, expectedObj) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { + return NoError(a.t, err, msgAndArgs...) +} + + +// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. 
+// +// a.NotContains("Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") +// a.NotContains(["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") +// a.NotContains({"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { + return NotContains(a.t, s, contains, msgAndArgs...) +} + + +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if a.NotEmpty(obj) { +// assert.Equal(t, "two", obj[1]) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool { + return NotEmpty(a.t, object, msgAndArgs...) +} + + +// NotEqual asserts that the specified values are NOT equal. +// +// a.NotEqual(obj1, obj2, "two objects shouldn't be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + return NotEqual(a.t, expected, actual, msgAndArgs...) +} + + +// NotNil asserts that the specified object is not nil. +// +// a.NotNil(err, "err should be something") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool { + return NotNil(a.t, object, msgAndArgs...) +} + + +// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// a.NotPanics(func(){ +// RemainCalm() +// }, "Calling RemainCalm() should NOT panic") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool { + return NotPanics(a.t, f, msgAndArgs...) +} + + +// NotRegexp asserts that a specified regexp does not match a string. +// +// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") +// a.NotRegexp("^start", "it's not starting") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { + return NotRegexp(a.t, rx, str, msgAndArgs...) +} + + +// NotZero asserts that i is not the zero value for its type and returns the truth. +func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) bool { + return NotZero(a.t, i, msgAndArgs...) +} + + +// Panics asserts that the code inside the specified PanicTestFunc panics. +// +// a.Panics(func(){ +// GoCrazy() +// }, "Calling GoCrazy() should panic") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { + return Panics(a.t, f, msgAndArgs...) +} + + +// Regexp asserts that a specified regexp matches a string. +// +// a.Regexp(regexp.MustCompile("start"), "it's starting") +// a.Regexp("start...$", "it's not starting") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { + return Regexp(a.t, rx, str, msgAndArgs...) +} + + +// True asserts that the specified value is true. 
+// +// a.True(myBool, "myBool should be true") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { + return True(a.t, value, msgAndArgs...) +} + + +// WithinDuration asserts that the two times are within duration delta of each other. +// +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { + return WithinDuration(a.t, expected, actual, delta, msgAndArgs...) +} + + +// Zero asserts that i is the zero value for its type and returns the truth. +func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool { + return Zero(a.t, i, msgAndArgs...) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl new file mode 100644 index 0000000..99f9acf --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl @@ -0,0 +1,4 @@ +{{.CommentWithoutT "a"}} +func (a *Assertions) {{.DocInfo.Name}}({{.Params}}) bool { + return {{.DocInfo.Name}}(a.t, {{.ForwardedParams}}) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go new file mode 100644 index 0000000..d7c16c5 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -0,0 +1,1004 @@ +package assert + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + "math" + "reflect" + "regexp" + "runtime" + "strings" + "time" + "unicode" + "unicode/utf8" + + "github.com/davecgh/go-spew/spew" + "github.com/pmezard/go-difflib/difflib" +) + +// TestingT is an interface wrapper around *testing.T +type TestingT interface { + Errorf(format string, args ...interface{}) +} + +// Comparison a custom function that returns true on success and false on failure +type Comparison func() (success bool) + +/* + Helper functions +*/ + +// ObjectsAreEqual determines if two objects are considered equal. +// +// This function does no assertion of any kind. +func ObjectsAreEqual(expected, actual interface{}) bool { + + if expected == nil || actual == nil { + return expected == actual + } + + return reflect.DeepEqual(expected, actual) + +} + +// ObjectsAreEqualValues gets whether two objects are equal, or if their +// values are equal. +func ObjectsAreEqualValues(expected, actual interface{}) bool { + if ObjectsAreEqual(expected, actual) { + return true + } + + actualType := reflect.TypeOf(actual) + if actualType == nil { + return false + } + expectedValue := reflect.ValueOf(expected) + if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) { + // Attempt comparison after type conversion + return reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual) + } + + return false +} + +/* CallerInfo is necessary because the assert functions use the testing object +internally, causing it to print the file:line of the assert method, rather than where +the problem actually occured in calling code.*/ + +// CallerInfo returns an array of strings containing the file and line number +// of each stack frame leading from the current test to the assert call that +// failed. 
+func CallerInfo() []string { + + pc := uintptr(0) + file := "" + line := 0 + ok := false + name := "" + + callers := []string{} + for i := 0; ; i++ { + pc, file, line, ok = runtime.Caller(i) + if !ok { + return nil + } + + // This is a huge edge case, but it will panic if this is the case, see #180 + if file == "" { + break + } + + parts := strings.Split(file, "/") + dir := parts[len(parts)-2] + file = parts[len(parts)-1] + if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" { + callers = append(callers, fmt.Sprintf("%s:%d", file, line)) + } + + f := runtime.FuncForPC(pc) + if f == nil { + break + } + name = f.Name() + // Drop the package + segments := strings.Split(name, ".") + name = segments[len(segments)-1] + if isTest(name, "Test") || + isTest(name, "Benchmark") || + isTest(name, "Example") { + break + } + } + + return callers +} + +// Stolen from the `go test` tool. +// isTest tells whether name looks like a test (or benchmark, according to prefix). +// It is a Test (say) if there is a character after Test that is not a lower-case letter. +// We don't want TesticularCancer. +func isTest(name, prefix string) bool { + if !strings.HasPrefix(name, prefix) { + return false + } + if len(name) == len(prefix) { // "Test" is ok + return true + } + rune, _ := utf8.DecodeRuneInString(name[len(prefix):]) + return !unicode.IsLower(rune) +} + +// getWhitespaceString returns a string that is long enough to overwrite the default +// output from the go testing framework. +func getWhitespaceString() string { + + _, file, line, ok := runtime.Caller(1) + if !ok { + return "" + } + parts := strings.Split(file, "/") + file = parts[len(parts)-1] + + return strings.Repeat(" ", len(fmt.Sprintf("%s:%d: ", file, line))) + +} + +func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { + if len(msgAndArgs) == 0 || msgAndArgs == nil { + return "" + } + if len(msgAndArgs) == 1 { + return msgAndArgs[0].(string) + } + if len(msgAndArgs) > 1 { + return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...) + } + return "" +} + +// Indents all lines of the message by appending a number of tabs to each line, in an output format compatible with Go's +// test printing (see inner comment for specifics) +func indentMessageLines(message string, tabs int) string { + outBuf := new(bytes.Buffer) + + for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ { + if i != 0 { + outBuf.WriteRune('\n') + } + for ii := 0; ii < tabs; ii++ { + outBuf.WriteRune('\t') + // Bizarrely, all lines except the first need one fewer tabs prepended, so deliberately advance the counter + // by 1 prematurely. + if ii == 0 && i > 0 { + ii++ + } + } + outBuf.WriteString(scanner.Text()) + } + + return outBuf.String() +} + +type failNower interface { + FailNow() +} + +// FailNow fails test +func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool { + Fail(t, failureMessage, msgAndArgs...) + + // We cannot extend TestingT with FailNow() and + // maintain backwards compatibility, so we fallback + // to panicking when FailNow is not available in + // TestingT. + // See issue #263 + + if t, ok := t.(failNower); ok { + t.FailNow() + } else { + panic("test failed and t is missing `FailNow()`") + } + return false +} + +// Fail reports a failure through +func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool { + + message := messageFromMsgAndArgs(msgAndArgs...) 
+ + errorTrace := strings.Join(CallerInfo(), "\n\r\t\t\t") + if len(message) > 0 { + t.Errorf("\r%s\r\tError Trace:\t%s\n"+ + "\r\tError:%s\n"+ + "\r\tMessages:\t%s\n\r", + getWhitespaceString(), + errorTrace, + indentMessageLines(failureMessage, 2), + message) + } else { + t.Errorf("\r%s\r\tError Trace:\t%s\n"+ + "\r\tError:%s\n\r", + getWhitespaceString(), + errorTrace, + indentMessageLines(failureMessage, 2)) + } + + return false +} + +// Implements asserts that an object is implemented by the specified interface. +// +// assert.Implements(t, (*MyInterface)(nil), new(MyObject), "MyObject") +func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { + + interfaceType := reflect.TypeOf(interfaceObject).Elem() + + if !reflect.TypeOf(object).Implements(interfaceType) { + return Fail(t, fmt.Sprintf("%T must implement %v", object, interfaceType), msgAndArgs...) + } + + return true + +} + +// IsType asserts that the specified objects are of the same type. +func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { + + if !ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) { + return Fail(t, fmt.Sprintf("Object expected to be of type %v, but was %v", reflect.TypeOf(expectedType), reflect.TypeOf(object)), msgAndArgs...) + } + + return true +} + +// Equal asserts that two objects are equal. +// +// assert.Equal(t, 123, 123, "123 and 123 should be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + + if !ObjectsAreEqual(expected, actual) { + diff := diff(expected, actual) + return Fail(t, fmt.Sprintf("Not equal: %#v (expected)\n"+ + " != %#v (actual)%s", expected, actual, diff), msgAndArgs...) + } + + return true + +} + +// EqualValues asserts that two objects are equal or convertable to the same types +// and equal. +// +// assert.EqualValues(t, uint32(123), int32(123), "123 and 123 should be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + + if !ObjectsAreEqualValues(expected, actual) { + return Fail(t, fmt.Sprintf("Not equal: %#v (expected)\n"+ + " != %#v (actual)", expected, actual), msgAndArgs...) + } + + return true + +} + +// Exactly asserts that two objects are equal is value and type. +// +// assert.Exactly(t, int32(123), int64(123), "123 and 123 should NOT be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + + aType := reflect.TypeOf(expected) + bType := reflect.TypeOf(actual) + + if aType != bType { + return Fail(t, fmt.Sprintf("Types expected to match exactly\n\r\t%v != %v", aType, bType), msgAndArgs...) + } + + return Equal(t, expected, actual, msgAndArgs...) + +} + +// NotNil asserts that the specified object is not nil. +// +// assert.NotNil(t, err, "err should be something") +// +// Returns whether the assertion was successful (true) or not (false). +func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + if !isNil(object) { + return true + } + return Fail(t, "Expected value not to be nil.", msgAndArgs...) +} + +// isNil checks if a specified object is nil or not, without Failing. 
+func isNil(object interface{}) bool { + if object == nil { + return true + } + + value := reflect.ValueOf(object) + kind := value.Kind() + if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() { + return true + } + + return false +} + +// Nil asserts that the specified object is nil. +// +// assert.Nil(t, err, "err should be nothing") +// +// Returns whether the assertion was successful (true) or not (false). +func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + if isNil(object) { + return true + } + return Fail(t, fmt.Sprintf("Expected nil, but got: %#v", object), msgAndArgs...) +} + +var numericZeros = []interface{}{ + int(0), + int8(0), + int16(0), + int32(0), + int64(0), + uint(0), + uint8(0), + uint16(0), + uint32(0), + uint64(0), + float32(0), + float64(0), +} + +// isEmpty gets whether the specified object is considered empty or not. +func isEmpty(object interface{}) bool { + + if object == nil { + return true + } else if object == "" { + return true + } else if object == false { + return true + } + + for _, v := range numericZeros { + if object == v { + return true + } + } + + objValue := reflect.ValueOf(object) + + switch objValue.Kind() { + case reflect.Map: + fallthrough + case reflect.Slice, reflect.Chan: + { + return (objValue.Len() == 0) + } + case reflect.Struct: + switch object.(type) { + case time.Time: + return object.(time.Time).IsZero() + } + case reflect.Ptr: + { + if objValue.IsNil() { + return true + } + switch object.(type) { + case *time.Time: + return object.(*time.Time).IsZero() + default: + return false + } + } + } + return false +} + +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// assert.Empty(t, obj) +// +// Returns whether the assertion was successful (true) or not (false). +func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + + pass := isEmpty(object) + if !pass { + Fail(t, fmt.Sprintf("Should be empty, but was %v", object), msgAndArgs...) + } + + return pass + +} + +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if assert.NotEmpty(t, obj) { +// assert.Equal(t, "two", obj[1]) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + + pass := !isEmpty(object) + if !pass { + Fail(t, fmt.Sprintf("Should NOT be empty, but was %v", object), msgAndArgs...) + } + + return pass + +} + +// getLen try to get length of object. +// return (false, 0) if impossible. +func getLen(x interface{}) (ok bool, length int) { + v := reflect.ValueOf(x) + defer func() { + if e := recover(); e != nil { + ok = false + } + }() + return true, v.Len() +} + +// Len asserts that the specified object has specific length. +// Len also fails if the object has a type that len() not accept. +// +// assert.Len(t, mySlice, 3, "The size of slice is not 3") +// +// Returns whether the assertion was successful (true) or not (false). +func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { + ok, l := getLen(object) + if !ok { + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", object), msgAndArgs...) + } + + if l != length { + return Fail(t, fmt.Sprintf("\"%s\" should have %d item(s), but has %d", object, length, l), msgAndArgs...) 
+ } + return true +} + +// True asserts that the specified value is true. +// +// assert.True(t, myBool, "myBool should be true") +// +// Returns whether the assertion was successful (true) or not (false). +func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { + + if value != true { + return Fail(t, "Should be true", msgAndArgs...) + } + + return true + +} + +// False asserts that the specified value is false. +// +// assert.False(t, myBool, "myBool should be false") +// +// Returns whether the assertion was successful (true) or not (false). +func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { + + if value != false { + return Fail(t, "Should be false", msgAndArgs...) + } + + return true + +} + +// NotEqual asserts that the specified values are NOT equal. +// +// assert.NotEqual(t, obj1, obj2, "two objects shouldn't be equal") +// +// Returns whether the assertion was successful (true) or not (false). +func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + + if ObjectsAreEqual(expected, actual) { + return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...) + } + + return true + +} + +// containsElement try loop over the list check if the list includes the element. +// return (false, false) if impossible. +// return (true, false) if element was not found. +// return (true, true) if element was found. +func includeElement(list interface{}, element interface{}) (ok, found bool) { + + listValue := reflect.ValueOf(list) + elementValue := reflect.ValueOf(element) + defer func() { + if e := recover(); e != nil { + ok = false + found = false + } + }() + + if reflect.TypeOf(list).Kind() == reflect.String { + return true, strings.Contains(listValue.String(), elementValue.String()) + } + + if reflect.TypeOf(list).Kind() == reflect.Map { + mapKeys := listValue.MapKeys() + for i := 0; i < len(mapKeys); i++ { + if ObjectsAreEqual(mapKeys[i].Interface(), element) { + return true, true + } + } + return true, false + } + + for i := 0; i < listValue.Len(); i++ { + if ObjectsAreEqual(listValue.Index(i).Interface(), element) { + return true, true + } + } + return true, false + +} + +// Contains asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// assert.Contains(t, "Hello World", "World", "But 'Hello World' does contain 'World'") +// assert.Contains(t, ["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'") +// assert.Contains(t, {"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'") +// +// Returns whether the assertion was successful (true) or not (false). +func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { + + ok, found := includeElement(s, contains) + if !ok { + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...) + } + if !found { + return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", s, contains), msgAndArgs...) + } + + return true + +} + +// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. 
+// +// assert.NotContains(t, "Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'") +// assert.NotContains(t, ["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'") +// assert.NotContains(t, {"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'") +// +// Returns whether the assertion was successful (true) or not (false). +func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { + + ok, found := includeElement(s, contains) + if !ok { + return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...) + } + if found { + return Fail(t, fmt.Sprintf("\"%s\" should not contain \"%s\"", s, contains), msgAndArgs...) + } + + return true + +} + +// Condition uses a Comparison to assert a complex condition. +func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool { + result := comp() + if !result { + Fail(t, "Condition failed!", msgAndArgs...) + } + return result +} + +// PanicTestFunc defines a func that should be passed to the assert.Panics and assert.NotPanics +// methods, and represents a simple func that takes no arguments, and returns nothing. +type PanicTestFunc func() + +// didPanic returns true if the function passed to it panics. Otherwise, it returns false. +func didPanic(f PanicTestFunc) (bool, interface{}) { + + didPanic := false + var message interface{} + func() { + + defer func() { + if message = recover(); message != nil { + didPanic = true + } + }() + + // call the target function + f() + + }() + + return didPanic, message + +} + +// Panics asserts that the code inside the specified PanicTestFunc panics. +// +// assert.Panics(t, func(){ +// GoCrazy() +// }, "Calling GoCrazy() should panic") +// +// Returns whether the assertion was successful (true) or not (false). +func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { + + if funcDidPanic, panicValue := didPanic(f); !funcDidPanic { + return Fail(t, fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue), msgAndArgs...) + } + + return true +} + +// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// assert.NotPanics(t, func(){ +// RemainCalm() +// }, "Calling RemainCalm() should NOT panic") +// +// Returns whether the assertion was successful (true) or not (false). +func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { + + if funcDidPanic, panicValue := didPanic(f); funcDidPanic { + return Fail(t, fmt.Sprintf("func %#v should not panic\n\r\tPanic value:\t%v", f, panicValue), msgAndArgs...) + } + + return true +} + +// WithinDuration asserts that the two times are within duration delta of each other. +// +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s") +// +// Returns whether the assertion was successful (true) or not (false). +func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { + + dt := expected.Sub(actual) + if dt < -delta || dt > delta { + return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...) 
+ } + + return true +} + +func toFloat(x interface{}) (float64, bool) { + var xf float64 + xok := true + + switch xn := x.(type) { + case uint8: + xf = float64(xn) + case uint16: + xf = float64(xn) + case uint32: + xf = float64(xn) + case uint64: + xf = float64(xn) + case int: + xf = float64(xn) + case int8: + xf = float64(xn) + case int16: + xf = float64(xn) + case int32: + xf = float64(xn) + case int64: + xf = float64(xn) + case float32: + xf = float64(xn) + case float64: + xf = float64(xn) + default: + xok = false + } + + return xf, xok +} + +// InDelta asserts that the two numerals are within delta of each other. +// +// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) +// +// Returns whether the assertion was successful (true) or not (false). +func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + + af, aok := toFloat(expected) + bf, bok := toFloat(actual) + + if !aok || !bok { + return Fail(t, fmt.Sprintf("Parameters must be numerical"), msgAndArgs...) + } + + if math.IsNaN(af) { + return Fail(t, fmt.Sprintf("Actual must not be NaN"), msgAndArgs...) + } + + if math.IsNaN(bf) { + return Fail(t, fmt.Sprintf("Expected %v with delta %v, but was NaN", expected, delta), msgAndArgs...) + } + + dt := af - bf + if dt < -delta || dt > delta { + return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...) + } + + return true +} + +// InDeltaSlice is the same as InDelta, except it compares two slices. +func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { + if expected == nil || actual == nil || + reflect.TypeOf(actual).Kind() != reflect.Slice || + reflect.TypeOf(expected).Kind() != reflect.Slice { + return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...) + } + + actualSlice := reflect.ValueOf(actual) + expectedSlice := reflect.ValueOf(expected) + + for i := 0; i < actualSlice.Len(); i++ { + result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta) + if !result { + return result + } + } + + return true +} + +func calcRelativeError(expected, actual interface{}) (float64, error) { + af, aok := toFloat(expected) + if !aok { + return 0, fmt.Errorf("expected value %q cannot be converted to float", expected) + } + if af == 0 { + return 0, fmt.Errorf("expected value must have a value other than zero to calculate the relative error") + } + bf, bok := toFloat(actual) + if !bok { + return 0, fmt.Errorf("expected value %q cannot be converted to float", actual) + } + + return math.Abs(af-bf) / math.Abs(af), nil +} + +// InEpsilon asserts that expected and actual have a relative error less than epsilon +// +// Returns whether the assertion was successful (true) or not (false). +func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { + actualEpsilon, err := calcRelativeError(expected, actual) + if err != nil { + return Fail(t, err.Error(), msgAndArgs...) + } + if actualEpsilon > epsilon { + return Fail(t, fmt.Sprintf("Relative error is too high: %#v (expected)\n"+ + " < %#v (actual)", actualEpsilon, epsilon), msgAndArgs...) + } + + return true +} + +// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. 
+func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { + if expected == nil || actual == nil || + reflect.TypeOf(actual).Kind() != reflect.Slice || + reflect.TypeOf(expected).Kind() != reflect.Slice { + return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...) + } + + actualSlice := reflect.ValueOf(actual) + expectedSlice := reflect.ValueOf(expected) + + for i := 0; i < actualSlice.Len(); i++ { + result := InEpsilon(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), epsilon) + if !result { + return result + } + } + + return true +} + +/* + Errors +*/ + +// NoError asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if assert.NoError(t, err) { +// assert.Equal(t, actualObj, expectedObj) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { + if isNil(err) { + return true + } + + return Fail(t, fmt.Sprintf("Received unexpected error %q", err), msgAndArgs...) +} + +// Error asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if assert.Error(t, err, "An error was expected") { +// assert.Equal(t, err, expectedError) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { + + message := messageFromMsgAndArgs(msgAndArgs...) + return NotNil(t, err, "An error is expected but got nil. %s", message) + +} + +// EqualError asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// if assert.Error(t, err, "An error was expected") { +// assert.Equal(t, err, expectedError) +// } +// +// Returns whether the assertion was successful (true) or not (false). +func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { + + message := messageFromMsgAndArgs(msgAndArgs...) + if !NotNil(t, theError, "An error is expected but got nil. %s", message) { + return false + } + s := "An error with value \"%s\" is expected but got \"%s\". %s" + return Equal(t, errString, theError.Error(), + s, errString, theError.Error(), message) +} + +// matchRegexp return true if a specified regexp matches a string. +func matchRegexp(rx interface{}, str interface{}) bool { + + var r *regexp.Regexp + if rr, ok := rx.(*regexp.Regexp); ok { + r = rr + } else { + r = regexp.MustCompile(fmt.Sprint(rx)) + } + + return (r.FindStringIndex(fmt.Sprint(str)) != nil) + +} + +// Regexp asserts that a specified regexp matches a string. +// +// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") +// assert.Regexp(t, "start...$", "it's not starting") +// +// Returns whether the assertion was successful (true) or not (false). +func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { + + match := matchRegexp(rx, str) + + if !match { + Fail(t, fmt.Sprintf("Expect \"%v\" to match \"%v\"", str, rx), msgAndArgs...) + } + + return match +} + +// NotRegexp asserts that a specified regexp does not match a string. +// +// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") +// assert.NotRegexp(t, "^start", "it's not starting") +// +// Returns whether the assertion was successful (true) or not (false). 
+func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { + match := matchRegexp(rx, str) + + if match { + Fail(t, fmt.Sprintf("Expect \"%v\" to NOT match \"%v\"", str, rx), msgAndArgs...) + } + + return !match + +} + +// Zero asserts that i is the zero value for its type and returns the truth. +func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { + if i != nil && !reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { + return Fail(t, fmt.Sprintf("Should be zero, but was %v", i), msgAndArgs...) + } + return true +} + +// NotZero asserts that i is not the zero value for its type and returns the truth. +func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { + if i == nil || reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { + return Fail(t, fmt.Sprintf("Should not be zero, but was %v", i), msgAndArgs...) + } + return true +} + +// JSONEq asserts that two JSON strings are equivalent. +// +// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// +// Returns whether the assertion was successful (true) or not (false). +func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { + var expectedJSONAsInterface, actualJSONAsInterface interface{} + + if err := json.Unmarshal([]byte(expected), &expectedJSONAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...) + } + + if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...) + } + + return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...) +} + +func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { + t := reflect.TypeOf(v) + k := t.Kind() + + if k == reflect.Ptr { + t = t.Elem() + k = t.Kind() + } + return t, k +} + +// diff returns a diff of both values as long as both are of the same type and +// are a struct, map, slice or array. Otherwise it returns an empty string. +func diff(expected interface{}, actual interface{}) string { + if expected == nil || actual == nil { + return "" + } + + et, ek := typeAndKind(expected) + at, _ := typeAndKind(actual) + + if et != at { + return "" + } + + if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array { + return "" + } + + spew.Config.SortKeys = true + e := spew.Sdump(expected) + a := spew.Sdump(actual) + + diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ + A: difflib.SplitLines(e), + B: difflib.SplitLines(a), + FromFile: "Expected", + FromDate: "", + ToFile: "Actual", + ToDate: "", + Context: 1, + }) + + return "\n\nDiff:\n" + diff +} diff --git a/vendor/github.com/stretchr/testify/assert/doc.go b/vendor/github.com/stretchr/testify/assert/doc.go new file mode 100644 index 0000000..c9dccc4 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/doc.go @@ -0,0 +1,45 @@ +// Package assert provides a set of comprehensive testing tools for use with the normal Go testing system. 
+// +// Example Usage +// +// The following is a complete example using assert in a standard test function: +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) +// +// func TestSomething(t *testing.T) { +// +// var a string = "Hello" +// var b string = "Hello" +// +// assert.Equal(t, a, b, "The two words should be the same.") +// +// } +// +// if you assert many times, use the format below: +// +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) +// +// func TestSomething(t *testing.T) { +// assert := assert.New(t) +// +// var a string = "Hello" +// var b string = "Hello" +// +// assert.Equal(a, b, "The two words should be the same.") +// } +// +// Assertions +// +// Assertions allow you to easily write test code, and are global funcs in the `assert` package. +// All assertion functions take, as the first argument, the `*testing.T` object provided by the +// testing framework. This allows the assertion funcs to write the failings and other details to +// the correct place. +// +// Every assertion function also takes an optional string message as the final argument, +// allowing custom error messages to be appended to the message the assertion method outputs. +package assert diff --git a/vendor/github.com/stretchr/testify/assert/errors.go b/vendor/github.com/stretchr/testify/assert/errors.go new file mode 100644 index 0000000..ac9dc9d --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/errors.go @@ -0,0 +1,10 @@ +package assert + +import ( + "errors" +) + +// AnError is an error instance useful for testing. If the code does not care +// about error specifics, and only needs to return the error for example, this +// error should be used to make the test code more readable. +var AnError = errors.New("assert.AnError general error for testing") diff --git a/vendor/github.com/stretchr/testify/assert/forward_assertions.go b/vendor/github.com/stretchr/testify/assert/forward_assertions.go new file mode 100644 index 0000000..b867e95 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/forward_assertions.go @@ -0,0 +1,16 @@ +package assert + +// Assertions provides assertion methods around the +// TestingT interface. +type Assertions struct { + t TestingT +} + +// New makes a new Assertions object for the specified TestingT. +func New(t TestingT) *Assertions { + return &Assertions{ + t: t, + } +} + +//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go new file mode 100644 index 0000000..e1b9442 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/http_assertions.go @@ -0,0 +1,106 @@ +package assert + +import ( + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "strings" +) + +// httpCode is a helper that returns HTTP code of the response. It returns -1 +// if building a new request fails. +func httpCode(handler http.HandlerFunc, method, url string, values url.Values) int { + w := httptest.NewRecorder() + req, err := http.NewRequest(method, url+"?"+values.Encode(), nil) + if err != nil { + return -1 + } + handler(w, req) + return w.Code +} + +// HTTPSuccess asserts that a specified handler returns a success status code. +// +// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) +// +// Returns whether the assertion was successful (true) or not (false). 
+func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { + code := httpCode(handler, method, url, values) + if code == -1 { + return false + } + return code >= http.StatusOK && code <= http.StatusPartialContent +} + +// HTTPRedirect asserts that a specified handler returns a redirect status code. +// +// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { + code := httpCode(handler, method, url, values) + if code == -1 { + return false + } + return code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect +} + +// HTTPError asserts that a specified handler returns an error status code. +// +// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool { + code := httpCode(handler, method, url, values) + if code == -1 { + return false + } + return code >= http.StatusBadRequest +} + +// HTTPBody is a helper that returns HTTP body of the response. It returns +// empty string if building a new request fails. +func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string { + w := httptest.NewRecorder() + req, err := http.NewRequest(method, url+"?"+values.Encode(), nil) + if err != nil { + return "" + } + handler(w, req) + return w.Body.String() +} + +// HTTPBodyContains asserts that a specified handler returns a +// body that contains a string. +// +// assert.HTTPBodyContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool { + body := HTTPBody(handler, method, url, values) + + contains := strings.Contains(body, fmt.Sprint(str)) + if !contains { + Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body)) + } + + return contains +} + +// HTTPBodyNotContains asserts that a specified handler returns a +// body that does not contain a string. +// +// assert.HTTPBodyNotContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). 
+func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool { + body := HTTPBody(handler, method, url, values) + + contains := strings.Contains(body, fmt.Sprint(str)) + if contains { + Fail(t, "Expected response body for %s to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body) + } + + return !contains +} diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE new file mode 100644 index 0000000..2a7cfd2 --- /dev/null +++ b/vendor/github.com/stretchr/testify/vendor/github.com/davecgh/go-spew/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2012-2013 Dave Collins + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE new file mode 100644 index 0000000..c67dad6 --- /dev/null +++ b/vendor/github.com/stretchr/testify/vendor/github.com/pmezard/go-difflib/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Patrick Mezard +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + The names of its contributors may not be used to endorse or promote +products derived from this software without specific prior written +permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md new file mode 100644 index 0000000..2199945 --- /dev/null +++ b/vendor/github.com/stretchr/testify/vendor/github.com/stretchr/objx/LICENSE.md @@ -0,0 +1,23 @@ +objx - by Mat Ryer and Tyler Bunnell + +The MIT License (MIT) + +Copyright (c) 2014 Stretchr, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/yawn/envmap/.travis.yml b/vendor/github.com/yawn/envmap/.travis.yml new file mode 100644 index 0000000..4f2ee4d --- /dev/null +++ b/vendor/github.com/yawn/envmap/.travis.yml @@ -0,0 +1 @@ +language: go diff --git a/vendor/github.com/yawn/envmap/README.md b/vendor/github.com/yawn/envmap/README.md new file mode 100644 index 0000000..10010f3 --- /dev/null +++ b/vendor/github.com/yawn/envmap/README.md @@ -0,0 +1,5 @@ +# envmap + +[![Build Status](https://travis-ci.org/yawn/envmap.svg)](https://travis-ci.org/yawn/envmap) + +`envmap` provides access to environment variables as a `map[string]string` and can convert those maps back to `[]string`. diff --git a/vendor/github.com/yawn/envmap/envmap.go b/vendor/github.com/yawn/envmap/envmap.go new file mode 100644 index 0000000..27460cf --- /dev/null +++ b/vendor/github.com/yawn/envmap/envmap.go @@ -0,0 +1,67 @@ +package envmap + +import ( + "os" + "strings" +) + +const separator = "=" + +// Envmap is a mapping of environment keys to values +type Envmap map[string]string + +// Export exports the keys and values defined in this Envmap to the +// actual environment +func (e Envmap) Export() { + + for k, v := range e { + os.Setenv(k, v) + } + +} + +// Import creates an Envmap from the actual environment. +func Import() Envmap { + return ToMap(os.Environ()) +} + +// Join builds a environment variable declaration out of seperate +// key and value strings +func Join(k, v string) string { + return strings.Join([]string{k, v}, separator) +} + +// ToEnv converts a map of environment variables to a slice +// of key=value strings +func (e Envmap) ToEnv() (env []string) { + + for k, v := range e { + env = append(env, Join(k, v)) + } + + return + +} + +// ToMap converts a slice of environment variables to a map +// of environment variables +func ToMap(env []string) (m Envmap) { + + m = make(map[string]string) + + for _, e := range env { + + s := strings.Split(e, separator) + + var ( + key = s[0] + val = strings.Join(s[1:], separator) + ) + + m[key] = val + + } + + return + +}
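For orientation, a minimal sketch of how the two vendored libraries added above (github.com/yawn/envmap and github.com/stretchr/testify/assert) might be exercised together; the test file, test name and values below are illustrative assumptions and are not part of this change or of either upstream library:

package envplate_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/yawn/envmap"
)

// TestEnvmapRoundtrip is a hypothetical example, not part of this diff; it
// only calls the vendored APIs shown above.
func TestEnvmapRoundtrip(t *testing.T) {

	// ToMap splits each "key=value" entry on the first separator and keeps
	// any further "=" characters inside the value.
	m := envmap.ToMap([]string{"MODE=debug", "EMPTY=", "PAIR=a=b"})

	assert.Equal(t, "debug", m["MODE"], "plain values should round-trip")
	assert.Equal(t, "", m["EMPTY"], "empty values should become empty strings")
	assert.Equal(t, "a=b", m["PAIR"], "only the first '=' separates key and value")

	// Join builds a single declaration; ToEnv converts the whole map back
	// into a []string suitable for exec-style environments.
	assert.Equal(t, "MODE=debug", envmap.Join("MODE", "debug"))
	assert.Contains(t, m.ToEnv(), "MODE=debug")

	// Export writes the map into the actual process environment, where
	// Import can read it back.
	m.Export()
	assert.Equal(t, "debug", envmap.Import()["MODE"])
}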