From bfca71018ef145da5050e0561208c4163f01f8cd Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Tue, 28 Mar 2017 10:42:03 +0200 Subject: [PATCH 01/13] base64/md5/substr --- README.md | 225 ++++++++++++++++++++-------------------------- dynaml/base64.go | 40 +++++++++ dynaml/call.go | 11 +++ dynaml/md5.go | 22 +++++ dynaml/substr.go | 45 ++++++++++ flow/flow_test.go | 116 ++++++++++++++++++++++++ 6 files changed, 330 insertions(+), 129 deletions(-) create mode 100644 dynaml/base64.go create mode 100644 dynaml/md5.go create mode 100644 dynaml/substr.go diff --git a/README.md b/README.md index 78b5ada..8ba90fe 100644 --- a/README.md +++ b/README.md @@ -1,37 +1,21 @@ ``` - _ __ __ - ___ _ __ (_)/ _|/ _| _ _ - / __| '_ \| | |_| |_ _| |_ _| |_ - \__ \ |_) | | _| _|_ _|_ _| - |___/ .__/|_|_| |_| |_| |_| - |_| + ___ _ __ (_)/ _|/ _| + / __| '_ \| | |_| |_ + \__ \ |_) | | _| _| + |___/ .__/|_|_| |_| + |_| ``` --- -**NOTE**: *Active development on spiff is currently paused, including Pull Requests. `spiff++` is a fork of spiff that provides a compatible extension to spiff based on the latest version offering a rich set of new features not yet available in spiff. All fixes provided by the original spiff project will be incorporated into spiff++, also. Because there will be no way back to the spiff source base a new independent spiff++ repository has been created to continue development of spiff++.* ---- - -*spiff* is a command line tool and declarative in-domain hybrid YAML templating system. While regular templating systems process a template file by substituting the template expressions by values taken from -external data sources, in-domain means that the templating engine knows about the syntax and structure of the processed template. It therefore can take the values for the template expressions directly -from the document processed, including those parts denoted by the template expressions itself. 
+**NOTE**: *Active development on spiff is currently paused, including Pull Requests. Very severe issues will be addressed, and we will still be actively responding to requests for help via Issues.* -For example: -```yaml -resource: - name: bosh deployment - version: 25 - url: (( "http://resource.location/bosh?version=" version )) - description: (( "This document describes a " name " located at " url )) -``` - -Hybrid mean that the template processing is not restricted to the template itself. Additionally -*spiff* is able to merge the template with information from additional yaml files, so-called stubs, that again may contain template expressions. +--- +spiff is a command line tool and declarative YAML templating system, specially designed for generating BOSH deployment manifests. Contents: - - [Installation](#installation) - [Usage](#usage) - [dynaml Templating Language](#dynaml-templating-language) @@ -79,13 +63,15 @@ Contents: - [(( index(list, "foobar") ))](#-indexlist-foobar-) - [(( lastindex(list, "foobar") ))](#-lastindexlist-foobar-) - [(( replace(string, "foo", "bar") ))](#-replacestring-foo-bar-) + - [(( substr(string, 1, 3) ))](#-substrstring-1-3-) - [(( match("(f.*)(b.*)", "xxxfoobar") ))](#-matchfb-xxxfoobar-) - [(( length(list) ))](#-lengthlist-) + - [(( base64(string) ))](#-base64string-) + - [(( md5(string) ))](#-md5string-) - [(( defined(foobar) ))](#-definedfoobar-) - [(( valid(foobar) ))](#-validfoobar-) - [(( require(foobar) ))](#-requirefoobar-) - [(( stub(foo.bar) ))](#-stubfoobar-) - - [(( type(expr) ))](#-typeexpr-) - [(( exec( "command", arg1, arg2) ))](#-exec-command-arg1-arg2-) - [(( eval( foo "." 
bar ) ))](#-eval-foo--bar--) - [(( env( "HOME" ) ))](#-env-HOME--) @@ -108,8 +94,6 @@ Contents: - [(( sum[map|initial|sum,key,value|->dynaml-expr] ))](#-summapinitialsumkeyvalue-dynaml-expr-) - [Templates](#templates) - [<<: (( &template ))](#--template-) - - [- <<: (( &template ))](#----template-) - - [foo: (( &template (expression) ))](#foo--template-expression-) - [(( *foo.bar ))](#-foobar-) - [Special Literals](#special-literals) - [Access to evaluation context](#access-to-evaluation-context) @@ -122,7 +106,7 @@ Contents: # Installation -Official release executable binaries can be downloaded via [Github releases](https://github.com/mandelsoft/spiff/releases) for Darwin and Linux machines (and virtual machines). +Official release executable binaries can be downloaded via [Github releases](https://github.com/cloudfoundry-incubator/spiff/releases) for Darwin and Linux machines (and virtual machines). Some of spiff's dependencies have changed since the last official release, and spiff will not be updated to keep up with these dependencies. Working dependencies are vendored in the `Godeps` directory (more information on the `godep` tool is available [here](https://github.com/tools/godep)). As such, trying to `go get` spiff will likely fail; the only supported way to use spiff is to use an official binary release. @@ -148,25 +132,18 @@ It is possible to read one file from standard input by using the file name `-`. Show structural differences between two deployment manifests. -Unlike basic diffing tools and even `bosh diff`, this command has semantic -knowledge of a deployment manifest, and is not just text-based. For example, -if two manifests are the same except they have some jobs listed in different -orders, `spiff diff` will detect this, since job order matters in a manifest. 
-On the other hand, if two manifests differ only in the order of their -resource pools, for instance, then it will yield and empty diff since -resource pool order doesn't actually matter for a deployment. +Unlike 'bosh diff', this command has semantic knowledge of a deployment +manifest, and is not just text-based. It also doesn't modify either file. -Also unlike `bosh diff`, this command doesn't modify either file. - -It's tailored for checking differences between one deployment and the next. +It's tailored for checking differences between one deployment and the next. Typical flow: ```sh -$ spiff merge template.yml [templates...] > upgrade.yml +$ spiff merge template.yml [templates...] > deployment.yml $ bosh download manifest [deployment] current.yml -$ spiff diff upgrade.yml current.yml -$ bosh deployment upgrade.yml +$ spiff diff deployment.yml current.yml +$ bosh deployment deployment.yml $ bosh deploy ``` @@ -887,8 +864,7 @@ The result is the string `3 times 2 yields 6`. ## `(( "10.10.10.10" - 11 ))` -Besides arithmetic on integers it is also possible to use addition and -subtraction on ip addresses, or multiplication and division on CIDRs. +Besides arithmetic on integers it is also possible to use addition and subtraction on ip addresses. e.g.: @@ -904,39 +880,6 @@ ip: 10.10.10.10 range: 10.10.10.10-10.11.11.1 ``` -Subtraction also works on two IP addresses to calculate the number of -IP addresses between two IP addresses. - -e.g.: - -```yaml -diff: (( 10.0.1.0 - 10.0.0.1 + 1 )) -``` - -yields the value 256. IP address constants can be directly used in dynaml -expressions. They are implicitly converted to strings and back to IP -addresses if required by an operation. - -Multiplication and division can be used to handle IP range shifts on CIDRs. -With division a network can be partioned. The network size is increased -to allow at least a dedicated number of subnets below the original CIDR. 
-Multiplication then can be used to get the n-th next subnet of the same -size. - -e.g.: - -```yaml -subnet: (( "10.1.2.1/24" / 12 )) # first subnet CIDR for 16 subnets -next: (( "10.1.2.1/24" / 12 * 2)) # 2nd next (3rd) subnet CIDRS -``` - -yields - -```yaml -subnet: 10.1.2.0/28 -next: 10.1.2.32/28 -``` - Additionally there are functions working on IPv4 CIDRs: ```yaml @@ -955,6 +898,19 @@ next: 192.168.1.0 num: 192.168.0.0+256=192.168.1.0 ``` +Subtraction also works on two IP addresses to calculate the number of +IP addresses between two IP addresses. + +e.g.: + +```yaml +diff: (( 10.0.1.0 - 10.0.0.1 + 1 )) +``` + +yields the value 256. IP address constants can be directly used in dynaml +expressions. They are implicitly converted to strings and back to IP +addresses if required by an operation. + ## `(( a > 1 ? foo :bar ))` Dynaml supports the comparison operators `<`, `<=`, `==`, `!=`, `>=` and `>`. The comparison operators work on @@ -1230,6 +1186,29 @@ string: (( replace("foobar", "o", "u") )) yields `fuubar`. +### `(( substr(string, 1, 2) ))` + +Extract a substring from a string, starting from a given start index up to an optional end index (exclusive). If no end index is given the substring up to the end of the string is extracted. +Both indices might be negative. In this case they are taken from the end of the string. + +e.g.: + +```yaml +string: "foobar" +end1: (( substr(string,-2) )) +end2: (( substr(string,3) )) +range: (( substr(string,1,-1) )) +``` + +evaluates to + +```yaml +string: foobar +end1: ar +end2: bar +range: ooba +``` + ### `(( match("(f.*)(b.*)", "xxxfoobar") ))` Returns the match of a regular expression for a given string value. The match is a list of the matched values for the sub expressions contained in the regular expression. Index 0 refers to the match of the complete regular expression. If the string value does not match an empty list is returned. 
@@ -1271,6 +1250,40 @@ list: length: 2 ``` +### `(( base64(string) ))` + +The function `base64` generates a base64 encoding of a given string. `base64_decode` decodes a base64 encoded string. + +e.g.: + +```yaml +base64: (( base64("test") )) +test: (( base64_decode(base64))) +``` + +evaluates to + +```yaml +base64: dGVzdA== +test: test +``` + +### `(( md5(string) ))` + +The function `md5` generates an md5 hash for the given string. + +e.g.: + +```yaml +hash: (( md5("test") )) +``` + +evaluates to + +```yaml +hash: 098f6bcd4621d373cade4e832627b4f6 +``` + ### `(( defined(foobar) ))` The function `defined` checks whether an expression can successfully be evaluated. It yields the boolean value `true`, if the expression can be evaluated, and `false` otherwise. @@ -1377,21 +1390,6 @@ The argument passed to this function must either be a reference literal or an ex Alternatively the `merge` operation could be used, for example `merge foo.bar`. The difference is that `stub` does not merge, therefore the field will still be merged (with the original path in the document). -### `(( type(expr) ))` - -Return the type of a dynaml expression. The expression must evaluate without error. The following type values are returned: - -| type | type name | -|----------|-----------| -| integer | int | -| boolean | bool | -| string | string | -| map | map | -| list | list | -| template value | template | -| function | lambda | -| nil/~ | nil | -| ~~ | undef | ### `(( exec( "command", arg1, arg2) ))` @@ -1753,39 +1751,15 @@ In contrast to the previous `makemap` flavor, this one could also be handled by ### `(( merge(map1, map2) ))` -Beside the keyword ` merge` there is also a function called `merge` (It must always be followed by an opening bracket). It can be used to merge severals maps taken from the actual document. 
- -If the maps are specified by reference expressions, they cannot contain any _dynaml_ expressions, because they are always evaluated in the context of the actual document before evaluating the arguments. +Beside the keyword ` merge` there is also a function called `merge` (It must always be followed by an opensing bracket). It can be used to merge severals maps taken from the actual document. If the maps are specified by reference expressions, they cannot contain +any _dynaml_ expressions, because they are always evaluated in the context of the actual document before evaluating the arguments. e.g.: ```yaml map1: alice: 24 - bob: (( alice )) -map2: - alice: 26 - peter: 8 -result: (( merge(map1,map2) )) -``` - -resolves `result` to - -```yaml -result: - alice: 26 - bob: 24 # <---- expression evaluated before mergeing -``` - -Alternatively map [templates](#templates) can be passed (without evaluation operator!). In this case the _dynaml_ expressions from the template are evaluated while merging the given documents as for regular calls of _spiff merge_. - -e.g.: - -```yaml -map1: - <<: (( &template )) - alice: 24 - bob: (( alice )) + bob: 25 map2: alice: 26 peter: 8 @@ -1797,7 +1771,7 @@ resolves `result` to ```yaml result: alice: 26 - bob: 26 # <---- expression evaluate during merging + bob: 25 ``` A map might also be given by a map expression. Here it is possible to specify @@ -2143,7 +2117,7 @@ sum: 49 ## Templates -A maps, lists or even single values can be tagged by a dynaml expression to be used as template. Dynaml expressions in a template are not evaluated at its definition location in the document, but can be inserted at other locations using dynaml. +A map can be tagged by a dynaml expression to be used as template. Dynaml expressions in a template are not evaluated at its definition location in the document, but can be inserted at other locations using dynaml. At every usage location it is evaluated separately. 
### `<<: (( &template ))` @@ -2162,12 +2136,7 @@ foo: The template will be the value of the node `foo.bar`. As such it can be overwritten as a whole by settings in a stub during the merge process. Dynaml expressions in the template are not evaluated. A map can have only a single `<<` field. Therefore it is possible to combine the template marker with an expression just by adding the expression in parenthesis. -### `- <<: (( &template ))` - Adding `- <<: (( &template ))` to a list it is also possible to define list templates. - -### `foo: (( &template (expression) ))` - It is also possible to convert a single expression value into a simple template by adding the template marker to the expression, for example `foo: (( &template (expression) ))` @@ -2213,8 +2182,6 @@ use: verb: hates ``` -Templates can also be passed to the [merge](#-mergemap1-map2-) function to preserve the _dynaml_ expressions inside the map for use by the merge function. - ## Special Literals ### `(( {} ))` @@ -2639,7 +2606,7 @@ networks: - alice: 25 people: - - alice: 13 + - alice: 24 ``` To request an auto-merge of the structure resulting from the expression evaluation, the expression has to be preceeded with the modifier `prefer` (`(( prefer women men ))`). 
This would yield the desired result: @@ -2651,7 +2618,7 @@ networks: - alice: 25 people: - - alice: 13 + - alice: 24 - bob: 24 ``` diff --git a/dynaml/base64.go b/dynaml/base64.go new file mode 100644 index 0000000..f4b2790 --- /dev/null +++ b/dynaml/base64.go @@ -0,0 +1,40 @@ +package dynaml + +import ( + "encoding/base64" +) + +func func_base64(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + info := DefaultInfo() + + if len(arguments) != 1 { + return info.Error("base64 takes exactly one argument") + } + + str, ok := arguments[0].(string) + if !ok { + return info.Error("first argument for base64 must be a string") + } + + result := base64.StdEncoding.EncodeToString([]byte(str)) + return result, info, true +} + +func func_base64_decode(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + info := DefaultInfo() + + if len(arguments) != 1 { + return info.Error("base64_decode takes exactly one argument") + } + + str, ok := arguments[0].(string) + if !ok { + return info.Error("first argument for base64_decode must be a string") + } + + result, err := base64.StdEncoding.DecodeString(str) + if err != nil { + return info.Error("cannot decode string") + } + return string(result), info, true +} diff --git a/dynaml/call.go b/dynaml/call.go index 72087fc..cb8e6d4 100644 --- a/dynaml/call.go +++ b/dynaml/call.go @@ -145,6 +145,17 @@ func (e CallExpr) Evaluate(binding Binding, locally bool) (interface{}, Evaluati case "merge": result, sub, ok = func_merge(values, binding) + case "base64": + result, sub, ok = func_base64(values, binding) + case "base64_decode": + result, sub, ok = func_base64_decode(values, binding) + + case "md5": + result, sub, ok = func_md5(values, binding) + + case "substr": + result, sub, ok = func_substr(values, binding) + case "type": if info.Undefined { info.Undefined = false diff --git a/dynaml/md5.go b/dynaml/md5.go new file mode 100644 index 0000000..280fbac --- /dev/null +++ 
b/dynaml/md5.go @@ -0,0 +1,22 @@ +package dynaml + +import ( + "crypto/md5" + "fmt" +) + +func func_md5(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + info := DefaultInfo() + + if len(arguments) != 1 { + return info.Error("md5 takes exactly one argument") + } + + str, ok := arguments[0].(string) + if !ok { + return info.Error("first argument for md5 must be a string") + } + + result := md5.Sum([]byte(str)) + return fmt.Sprintf("%x", result), info, true +} diff --git a/dynaml/substr.go b/dynaml/substr.go new file mode 100644 index 0000000..1a94131 --- /dev/null +++ b/dynaml/substr.go @@ -0,0 +1,45 @@ +package dynaml + +import () + +func func_substr(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + info := DefaultInfo() + + if len(arguments) > 3 || len(arguments) < 2 { + return info.Error("substr takes two to three arguments") + } + + str, ok := arguments[0].(string) + if !ok { + return info.Error("first argument for substr must be a string") + } + start, ok := arguments[1].(int64) + if !ok { + return info.Error("second argument for substr must be an integer") + } + if start < 0 { + start = int64(len(str)) + start + } + var end int64 = int64(len(str)) + if len(arguments) >= 3 { + end, ok = arguments[2].(int64) + if !ok { + return info.Error("third argument for substr must be an integer") + } + if end < 0 { + end = int64(len(str)) + end + } + } + + if int64(len(str)) < end { + return info.Error("substr effective end index (%d) exceeds string length (%d)", end, len(str)) + } + if start < 0 { + return info.Error("negative substr effective start index (%d)", start) + } + if start > end { + return info.Error("substr start index (%d) after end index (%d)", start, end) + } + + return str[start:end], info, true +} diff --git a/flow/flow_test.go b/flow/flow_test.go index 312121f..d920862 100644 --- a/flow/flow_test.go +++ b/flow/flow_test.go @@ -5710,4 +5710,120 @@ data: {} 
Expect(source).To(FlowAs(resolved)) }) }) + + Describe("when calling base64", func() { + Context("doing encoding", func() { + It("it encodes a string", func() { + source := parseYAML(` +--- +value: (( base64("test") )) +`) + resolved := parseYAML(` +--- +value: dGVzdA== +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + Context("doing decoding", func() { + It("it decodes a string", func() { + source := parseYAML(` +--- +value: (( base64_decode("dGVzdA==") )) +`) + resolved := parseYAML(` +--- +value: test +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + }) + + Describe("when calling md5", func() { + It("it encodesgenerates md5 hash of a string", func() { + source := parseYAML(` +--- +value: (( md5("test") )) +`) + resolved := parseYAML(` +--- +value: 098f6bcd4621d373cade4e832627b4f6 +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + + Describe("when calling substr", func() { + Context("with 2 args", func() { + It("it handles positive start index", func() { + source := parseYAML(` +--- +value: (( substr("test",1) )) +`) + resolved := parseYAML(` +--- +value: est +`) + Expect(source).To(FlowAs(resolved)) + }) + It("it handles negative start index", func() { + source := parseYAML(` +--- +value: (( substr("test",-1) )) +`) + resolved := parseYAML(` +--- +value: t +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + Context("with 3 args", func() { + It("it handles positive start index", func() { + source := parseYAML(` +--- +value: (( substr("test",1,3) )) +`) + resolved := parseYAML(` +--- +value: es +`) + Expect(source).To(FlowAs(resolved)) + }) + It("it handles negative start index", func() { + source := parseYAML(` +--- +value: (( substr("test",-2,3) )) +`) + resolved := parseYAML(` +--- +value: s +`) + Expect(source).To(FlowAs(resolved)) + }) + It("it handles positive start index with negative end index", func() { + source := parseYAML(` +--- +value: (( substr("test",1,-1) )) +`) + resolved := parseYAML(` +--- +value: es +`) + 
Expect(source).To(FlowAs(resolved)) + }) + It("it handles negative start index with negative end index", func() { + source := parseYAML(` +--- +value: (( substr("test",-2,-1) )) +`) + resolved := parseYAML(` +--- +value: s +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + }) }) From aa93a1f1c9f06d275734c6d940f3df0c052666db Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Mon, 22 May 2017 17:43:15 +0200 Subject: [PATCH 02/13] projections --- README.md | 82 ++- dynaml/dynaml.peg | 5 +- dynaml/dynaml.peg.go | 1507 ++++++++++++++++++++++-------------------- dynaml/parser.go | 12 + dynaml/projection.go | 96 +++ flow/flow_test.go | 247 +++++++ 6 files changed, 1239 insertions(+), 710 deletions(-) create mode 100644 dynaml/projection.go diff --git a/README.md b/README.md index 8ba90fe..dbe2cfc 100644 --- a/README.md +++ b/README.md @@ -92,6 +92,9 @@ Contents: - [(( sum[list|initial|sum,elem|->dynaml-expr] ))](#-sumlistinitialsumelem-dynaml-expr-) - [(( sum[list|initial|sum,idx,elem|->dynaml-expr] ))](#-sumlistinitialsumidxelem-dynaml-expr-) - [(( sum[map|initial|sum,key,value|->dynaml-expr] ))](#-summapinitialsumkeyvalue-dynaml-expr-) + - [Projections](#projections) + - [(( expr.[*].value ))](#-exprvalue-) + - [(( list.[1..2].value ))](#-list12value-) - [Templates](#templates) - [<<: (( &template ))](#--template-) - [(( *foo.bar ))](#-foobar-) @@ -2115,6 +2118,83 @@ ages: sum: 49 ``` +## Projections + +Projections work over the elements of a list or map yielding a result list. Hereby every element is mapped by an optional subsequent reference expression. This may contain again projections, dynamic references or lambda calls. Basically this is a simplified form of the more general [mapping](#mappings) yielding a list working with a lambda function using only a reference expression based on the elements. + +### `(( expr.[*].value ))` + +All elements of a map or list given by the expression `expr` are dereferenced with the subsequent reference expression (here `.expr`). 
If this expression works on a map the elements are ordered according to their key values. If the subsequent reference expression is omitted, the complete value list is returned. For a list expression this means the identity operation. + +e.g.: + +```yaml +list: + - name: alice + age: 25 + - name: bob + age: 26 + - name: peter + age: 24 + +names: (( list.[*].name )) +``` + +yields for `names`: + +```yaml +names: + - alice + - bob + - peter +``` + +or for maps: + +```yaml +networks: + ext: + cidr: 10.8.0.0/16 + zone1: + cidr: 10.9.0.0/16 + +cidrs: (( .networks.[*].cidr )) +``` + +yields for `cidrs`: + +```yaml +cidrs: + - 10.8.0.0/16 + - 10.9.0.0/16 +``` + +### `(( list.[1..2].value ))` + +This projection flavor only works for lists. The projection is done for a dedicated slice of the initial list. + +e.g.: + +```yaml +list: + - name: alice + age: 25 + - name: bob + age: 26 + - name: peter + age: 24 + +names: (( list.[1..2].name )) +``` + +yields for `names`: + +```yaml +names: + - bob + - peter +``` + ## Templates A map can be tagged by a dynaml expression to be used as template. Dynaml expressions in a template are not evaluated at its definition location in the document, but can be inserted at other locations using dynaml. @@ -2124,7 +2204,7 @@ At every usage location it is evaluated separately. The dynaml expression `&template` can be used to tag a map node as template: -i.g.: +e.g.: ```yaml foo: diff --git a/dynaml/dynaml.peg b/dynaml/dynaml.peg index 0d267be..d7ab513 100644 --- a/dynaml/dynaml.peg +++ b/dynaml/dynaml.peg @@ -42,7 +42,7 @@ Level0 <- IP / String / Integer / Boolean / Undefined / Nil / Not / Substitution / Merge / Auto / Lambda / Chained Chained <- ( Mapping / Sum / List / Map / Range / Grouped / Reference ) ChainedQualifiedExpression* -ChainedQualifiedExpression <- ChainedCall / ( '.' ( ChainedRef / ChainedDynRef / Slice ) ) +ChainedQualifiedExpression <- ChainedCall / ( '.' 
( ChainedRef / ChainedDynRef / Projection) ) ChainedRef <- ( Key / Index ) FollowUpRef ChainedDynRef <- '[' Expression ']' Slice <- Range @@ -50,6 +50,9 @@ ChainedCall <- '(' Arguments ')' Arguments <- Expression (NextExpression)* NextExpression <- ',' Expression +Projection <- ( '[*]' / Slice ) ProjectionValue ChainedQualifiedExpression* +ProjectionValue <- {} + Substitution <- '*' Level0 Not <- '!' ws Level0 Grouped <- '(' Expression ')' diff --git a/dynaml/dynaml.peg.go b/dynaml/dynaml.peg.go index 38bc3a2..4e460e0 100644 --- a/dynaml/dynaml.peg.go +++ b/dynaml/dynaml.peg.go @@ -49,6 +49,8 @@ const ( ruleChainedCall ruleArguments ruleNextExpression + ruleProjection + ruleProjectionValue ruleSubstitution ruleNot ruleGrouped @@ -85,6 +87,7 @@ const ( ruleIP rulews rulereq_ws + ruleAction0 rulePre ruleIn @@ -128,6 +131,8 @@ var rul3s = [...]string{ "ChainedCall", "Arguments", "NextExpression", + "Projection", + "ProjectionValue", "Substitution", "Not", "Grouped", @@ -164,6 +169,7 @@ var rul3s = [...]string{ "IP", "ws", "req_ws", + "Action0", "Pre_", "_In_", @@ -482,7 +488,7 @@ func (t *tokens32) Expand(index int) tokenTree { type DynamlGrammar struct { Buffer string buffer []rune - rules [72]func() bool + rules [75]func() bool Parse func(rule ...int) error Reset func() Pretty bool @@ -557,6 +563,18 @@ func (p *DynamlGrammar) Highlighter() { p.tokenTree.PrintSyntax() } +func (p *DynamlGrammar) Execute() { + buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 + for token := range p.tokenTree.Tokens() { + switch token.pegRule { + + case ruleAction0: + + } + } + _, _, _, _, _ = buffer, _buffer, text, begin, end +} + func (p *DynamlGrammar) Init() { p.buffer = []rune(p.Buffer) if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { @@ -1710,7 +1728,7 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position107, tokenIndex107, depth107 return false }, - /* 28 ChainedQualifiedExpression <- <(ChainedCall / ('.' 
(ChainedRef / ChainedDynRef / Slice)))> */ + /* 28 ChainedQualifiedExpression <- <(ChainedCall / ('.' (ChainedRef / ChainedDynRef / Projection)))> */ func() bool { position118, tokenIndex118, depth118 := position, tokenIndex, depth { @@ -1742,7 +1760,7 @@ func (p *DynamlGrammar) Init() { goto l122 l124: position, tokenIndex, depth = position122, tokenIndex122, depth122 - if !_rules[ruleSlice]() { + if !_rules[ruleProjection]() { goto l118 } } @@ -1902,1655 +1920,1728 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position139, tokenIndex139, depth139 return false }, - /* 35 Substitution <- <('*' Level0)> */ + /* 35 Projection <- <((('[' '*' ']') / Slice) ProjectionValue ChainedQualifiedExpression*)> */ func() bool { position141, tokenIndex141, depth141 := position, tokenIndex, depth { position142 := position depth++ - if buffer[position] != rune('*') { - goto l141 + { + position143, tokenIndex143, depth143 := position, tokenIndex, depth + if buffer[position] != rune('[') { + goto l144 + } + position++ + if buffer[position] != rune('*') { + goto l144 + } + position++ + if buffer[position] != rune(']') { + goto l144 + } + position++ + goto l143 + l144: + position, tokenIndex, depth = position143, tokenIndex143, depth143 + if !_rules[ruleSlice]() { + goto l141 + } } - position++ - if !_rules[ruleLevel0]() { + l143: + if !_rules[ruleProjectionValue]() { goto l141 } + l145: + { + position146, tokenIndex146, depth146 := position, tokenIndex, depth + if !_rules[ruleChainedQualifiedExpression]() { + goto l146 + } + goto l145 + l146: + position, tokenIndex, depth = position146, tokenIndex146, depth146 + } depth-- - add(ruleSubstitution, position142) + add(ruleProjection, position142) } return true l141: position, tokenIndex, depth = position141, tokenIndex141, depth141 return false }, - /* 36 Not <- <('!' 
ws Level0)> */ + /* 36 ProjectionValue <- */ func() bool { - position143, tokenIndex143, depth143 := position, tokenIndex, depth + position147, tokenIndex147, depth147 := position, tokenIndex, depth { - position144 := position + position148 := position + depth++ + if !_rules[ruleAction0]() { + goto l147 + } + depth-- + add(ruleProjectionValue, position148) + } + return true + l147: + position, tokenIndex, depth = position147, tokenIndex147, depth147 + return false + }, + /* 37 Substitution <- <('*' Level0)> */ + func() bool { + position149, tokenIndex149, depth149 := position, tokenIndex, depth + { + position150 := position + depth++ + if buffer[position] != rune('*') { + goto l149 + } + position++ + if !_rules[ruleLevel0]() { + goto l149 + } + depth-- + add(ruleSubstitution, position150) + } + return true + l149: + position, tokenIndex, depth = position149, tokenIndex149, depth149 + return false + }, + /* 38 Not <- <('!' ws Level0)> */ + func() bool { + position151, tokenIndex151, depth151 := position, tokenIndex, depth + { + position152 := position depth++ if buffer[position] != rune('!') { - goto l143 + goto l151 } position++ if !_rules[rulews]() { - goto l143 + goto l151 } if !_rules[ruleLevel0]() { - goto l143 + goto l151 } depth-- - add(ruleNot, position144) + add(ruleNot, position152) } return true - l143: - position, tokenIndex, depth = position143, tokenIndex143, depth143 + l151: + position, tokenIndex, depth = position151, tokenIndex151, depth151 return false }, - /* 37 Grouped <- <('(' Expression ')')> */ + /* 39 Grouped <- <('(' Expression ')')> */ func() bool { - position145, tokenIndex145, depth145 := position, tokenIndex, depth + position153, tokenIndex153, depth153 := position, tokenIndex, depth { - position146 := position + position154 := position depth++ if buffer[position] != rune('(') { - goto l145 + goto l153 } position++ if !_rules[ruleExpression]() { - goto l145 + goto l153 } if buffer[position] != rune(')') { - goto l145 + goto l153 } 
position++ depth-- - add(ruleGrouped, position146) + add(ruleGrouped, position154) } return true - l145: - position, tokenIndex, depth = position145, tokenIndex145, depth145 + l153: + position, tokenIndex, depth = position153, tokenIndex153, depth153 return false }, - /* 38 Range <- <('[' Expression ('.' '.') Expression ']')> */ + /* 40 Range <- <('[' Expression ('.' '.') Expression ']')> */ func() bool { - position147, tokenIndex147, depth147 := position, tokenIndex, depth + position155, tokenIndex155, depth155 := position, tokenIndex, depth { - position148 := position + position156 := position depth++ if buffer[position] != rune('[') { - goto l147 + goto l155 } position++ if !_rules[ruleExpression]() { - goto l147 + goto l155 } if buffer[position] != rune('.') { - goto l147 + goto l155 } position++ if buffer[position] != rune('.') { - goto l147 + goto l155 } position++ if !_rules[ruleExpression]() { - goto l147 + goto l155 } if buffer[position] != rune(']') { - goto l147 + goto l155 } position++ depth-- - add(ruleRange, position148) + add(ruleRange, position156) } return true - l147: - position, tokenIndex, depth = position147, tokenIndex147, depth147 + l155: + position, tokenIndex, depth = position155, tokenIndex155, depth155 return false }, - /* 39 Integer <- <('-'? [0-9] ([0-9] / '_')*)> */ + /* 41 Integer <- <('-'? 
[0-9] ([0-9] / '_')*)> */ func() bool { - position149, tokenIndex149, depth149 := position, tokenIndex, depth + position157, tokenIndex157, depth157 := position, tokenIndex, depth { - position150 := position + position158 := position depth++ { - position151, tokenIndex151, depth151 := position, tokenIndex, depth + position159, tokenIndex159, depth159 := position, tokenIndex, depth if buffer[position] != rune('-') { - goto l151 + goto l159 } position++ - goto l152 - l151: - position, tokenIndex, depth = position151, tokenIndex151, depth151 + goto l160 + l159: + position, tokenIndex, depth = position159, tokenIndex159, depth159 } - l152: + l160: if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l149 + goto l157 } position++ - l153: + l161: { - position154, tokenIndex154, depth154 := position, tokenIndex, depth + position162, tokenIndex162, depth162 := position, tokenIndex, depth { - position155, tokenIndex155, depth155 := position, tokenIndex, depth + position163, tokenIndex163, depth163 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l156 + goto l164 } position++ - goto l155 - l156: - position, tokenIndex, depth = position155, tokenIndex155, depth155 + goto l163 + l164: + position, tokenIndex, depth = position163, tokenIndex163, depth163 if buffer[position] != rune('_') { - goto l154 + goto l162 } position++ } - l155: - goto l153 - l154: - position, tokenIndex, depth = position154, tokenIndex154, depth154 + l163: + goto l161 + l162: + position, tokenIndex, depth = position162, tokenIndex162, depth162 } depth-- - add(ruleInteger, position150) + add(ruleInteger, position158) } return true - l149: - position, tokenIndex, depth = position149, tokenIndex149, depth149 + l157: + position, tokenIndex, depth = position157, tokenIndex157, depth157 return false }, - /* 40 String <- <('"' (('\\' '"') / (!'"' .))* '"')> */ + /* 42 String <- <('"' (('\\' '"') / (!'"' .))* '"')> */ func() bool { - position157, 
tokenIndex157, depth157 := position, tokenIndex, depth + position165, tokenIndex165, depth165 := position, tokenIndex, depth { - position158 := position + position166 := position depth++ if buffer[position] != rune('"') { - goto l157 + goto l165 } position++ - l159: + l167: { - position160, tokenIndex160, depth160 := position, tokenIndex, depth + position168, tokenIndex168, depth168 := position, tokenIndex, depth { - position161, tokenIndex161, depth161 := position, tokenIndex, depth + position169, tokenIndex169, depth169 := position, tokenIndex, depth if buffer[position] != rune('\\') { - goto l162 + goto l170 } position++ if buffer[position] != rune('"') { - goto l162 + goto l170 } position++ - goto l161 - l162: - position, tokenIndex, depth = position161, tokenIndex161, depth161 + goto l169 + l170: + position, tokenIndex, depth = position169, tokenIndex169, depth169 { - position163, tokenIndex163, depth163 := position, tokenIndex, depth + position171, tokenIndex171, depth171 := position, tokenIndex, depth if buffer[position] != rune('"') { - goto l163 + goto l171 } position++ - goto l160 - l163: - position, tokenIndex, depth = position163, tokenIndex163, depth163 + goto l168 + l171: + position, tokenIndex, depth = position171, tokenIndex171, depth171 } if !matchDot() { - goto l160 + goto l168 } } - l161: - goto l159 - l160: - position, tokenIndex, depth = position160, tokenIndex160, depth160 + l169: + goto l167 + l168: + position, tokenIndex, depth = position168, tokenIndex168, depth168 } if buffer[position] != rune('"') { - goto l157 + goto l165 } position++ depth-- - add(ruleString, position158) + add(ruleString, position166) } return true - l157: - position, tokenIndex, depth = position157, tokenIndex157, depth157 + l165: + position, tokenIndex, depth = position165, tokenIndex165, depth165 return false }, - /* 41 Boolean <- <(('t' 'r' 'u' 'e') / ('f' 'a' 'l' 's' 'e'))> */ + /* 43 Boolean <- <(('t' 'r' 'u' 'e') / ('f' 'a' 'l' 's' 'e'))> */ func() bool { - 
position164, tokenIndex164, depth164 := position, tokenIndex, depth + position172, tokenIndex172, depth172 := position, tokenIndex, depth { - position165 := position + position173 := position depth++ { - position166, tokenIndex166, depth166 := position, tokenIndex, depth + position174, tokenIndex174, depth174 := position, tokenIndex, depth if buffer[position] != rune('t') { - goto l167 + goto l175 } position++ if buffer[position] != rune('r') { - goto l167 + goto l175 } position++ if buffer[position] != rune('u') { - goto l167 + goto l175 } position++ if buffer[position] != rune('e') { - goto l167 + goto l175 } position++ - goto l166 - l167: - position, tokenIndex, depth = position166, tokenIndex166, depth166 + goto l174 + l175: + position, tokenIndex, depth = position174, tokenIndex174, depth174 if buffer[position] != rune('f') { - goto l164 + goto l172 } position++ if buffer[position] != rune('a') { - goto l164 + goto l172 } position++ if buffer[position] != rune('l') { - goto l164 + goto l172 } position++ if buffer[position] != rune('s') { - goto l164 + goto l172 } position++ if buffer[position] != rune('e') { - goto l164 + goto l172 } position++ } - l166: + l174: depth-- - add(ruleBoolean, position165) + add(ruleBoolean, position173) } return true - l164: - position, tokenIndex, depth = position164, tokenIndex164, depth164 + l172: + position, tokenIndex, depth = position172, tokenIndex172, depth172 return false }, - /* 42 Nil <- <(('n' 'i' 'l') / '~')> */ + /* 44 Nil <- <(('n' 'i' 'l') / '~')> */ func() bool { - position168, tokenIndex168, depth168 := position, tokenIndex, depth + position176, tokenIndex176, depth176 := position, tokenIndex, depth { - position169 := position + position177 := position depth++ { - position170, tokenIndex170, depth170 := position, tokenIndex, depth + position178, tokenIndex178, depth178 := position, tokenIndex, depth if buffer[position] != rune('n') { - goto l171 + goto l179 } position++ if buffer[position] != rune('i') { - goto 
l171 + goto l179 } position++ if buffer[position] != rune('l') { - goto l171 + goto l179 } position++ - goto l170 - l171: - position, tokenIndex, depth = position170, tokenIndex170, depth170 + goto l178 + l179: + position, tokenIndex, depth = position178, tokenIndex178, depth178 if buffer[position] != rune('~') { - goto l168 + goto l176 } position++ } - l170: + l178: depth-- - add(ruleNil, position169) + add(ruleNil, position177) } return true - l168: - position, tokenIndex, depth = position168, tokenIndex168, depth168 + l176: + position, tokenIndex, depth = position176, tokenIndex176, depth176 return false }, - /* 43 Undefined <- <('~' '~')> */ + /* 45 Undefined <- <('~' '~')> */ func() bool { - position172, tokenIndex172, depth172 := position, tokenIndex, depth + position180, tokenIndex180, depth180 := position, tokenIndex, depth { - position173 := position + position181 := position depth++ if buffer[position] != rune('~') { - goto l172 + goto l180 } position++ if buffer[position] != rune('~') { - goto l172 + goto l180 } position++ depth-- - add(ruleUndefined, position173) + add(ruleUndefined, position181) } return true - l172: - position, tokenIndex, depth = position172, tokenIndex172, depth172 + l180: + position, tokenIndex, depth = position180, tokenIndex180, depth180 return false }, - /* 44 List <- <('[' Contents? ']')> */ + /* 46 List <- <('[' Contents? 
']')> */ func() bool { - position174, tokenIndex174, depth174 := position, tokenIndex, depth + position182, tokenIndex182, depth182 := position, tokenIndex, depth { - position175 := position + position183 := position depth++ if buffer[position] != rune('[') { - goto l174 + goto l182 } position++ { - position176, tokenIndex176, depth176 := position, tokenIndex, depth + position184, tokenIndex184, depth184 := position, tokenIndex, depth if !_rules[ruleContents]() { - goto l176 + goto l184 } - goto l177 - l176: - position, tokenIndex, depth = position176, tokenIndex176, depth176 + goto l185 + l184: + position, tokenIndex, depth = position184, tokenIndex184, depth184 } - l177: + l185: if buffer[position] != rune(']') { - goto l174 + goto l182 } position++ depth-- - add(ruleList, position175) + add(ruleList, position183) } return true - l174: - position, tokenIndex, depth = position174, tokenIndex174, depth174 + l182: + position, tokenIndex, depth = position182, tokenIndex182, depth182 return false }, - /* 45 Contents <- <(Expression NextExpression*)> */ + /* 47 Contents <- <(Expression NextExpression*)> */ func() bool { - position178, tokenIndex178, depth178 := position, tokenIndex, depth + position186, tokenIndex186, depth186 := position, tokenIndex, depth { - position179 := position + position187 := position depth++ if !_rules[ruleExpression]() { - goto l178 + goto l186 } - l180: + l188: { - position181, tokenIndex181, depth181 := position, tokenIndex, depth + position189, tokenIndex189, depth189 := position, tokenIndex, depth if !_rules[ruleNextExpression]() { - goto l181 + goto l189 } - goto l180 - l181: - position, tokenIndex, depth = position181, tokenIndex181, depth181 + goto l188 + l189: + position, tokenIndex, depth = position189, tokenIndex189, depth189 } depth-- - add(ruleContents, position179) + add(ruleContents, position187) } return true - l178: - position, tokenIndex, depth = position178, tokenIndex178, depth178 + l186: + position, tokenIndex, depth = 
position186, tokenIndex186, depth186 return false }, - /* 46 Map <- <(CreateMap ws Assignments? '}')> */ + /* 48 Map <- <(CreateMap ws Assignments? '}')> */ func() bool { - position182, tokenIndex182, depth182 := position, tokenIndex, depth + position190, tokenIndex190, depth190 := position, tokenIndex, depth { - position183 := position + position191 := position depth++ if !_rules[ruleCreateMap]() { - goto l182 + goto l190 } if !_rules[rulews]() { - goto l182 + goto l190 } { - position184, tokenIndex184, depth184 := position, tokenIndex, depth + position192, tokenIndex192, depth192 := position, tokenIndex, depth if !_rules[ruleAssignments]() { - goto l184 + goto l192 } - goto l185 - l184: - position, tokenIndex, depth = position184, tokenIndex184, depth184 + goto l193 + l192: + position, tokenIndex, depth = position192, tokenIndex192, depth192 } - l185: + l193: if buffer[position] != rune('}') { - goto l182 + goto l190 } position++ depth-- - add(ruleMap, position183) + add(ruleMap, position191) } return true - l182: - position, tokenIndex, depth = position182, tokenIndex182, depth182 + l190: + position, tokenIndex, depth = position190, tokenIndex190, depth190 return false }, - /* 47 CreateMap <- <'{'> */ + /* 49 CreateMap <- <'{'> */ func() bool { - position186, tokenIndex186, depth186 := position, tokenIndex, depth + position194, tokenIndex194, depth194 := position, tokenIndex, depth { - position187 := position + position195 := position depth++ if buffer[position] != rune('{') { - goto l186 + goto l194 } position++ depth-- - add(ruleCreateMap, position187) + add(ruleCreateMap, position195) } return true - l186: - position, tokenIndex, depth = position186, tokenIndex186, depth186 + l194: + position, tokenIndex, depth = position194, tokenIndex194, depth194 return false }, - /* 48 Assignments <- <(Assignment (',' Assignment)*)> */ + /* 50 Assignments <- <(Assignment (',' Assignment)*)> */ func() bool { - position188, tokenIndex188, depth188 := position, tokenIndex, 
depth + position196, tokenIndex196, depth196 := position, tokenIndex, depth { - position189 := position + position197 := position depth++ if !_rules[ruleAssignment]() { - goto l188 + goto l196 } - l190: + l198: { - position191, tokenIndex191, depth191 := position, tokenIndex, depth + position199, tokenIndex199, depth199 := position, tokenIndex, depth if buffer[position] != rune(',') { - goto l191 + goto l199 } position++ if !_rules[ruleAssignment]() { - goto l191 + goto l199 } - goto l190 - l191: - position, tokenIndex, depth = position191, tokenIndex191, depth191 + goto l198 + l199: + position, tokenIndex, depth = position199, tokenIndex199, depth199 } depth-- - add(ruleAssignments, position189) + add(ruleAssignments, position197) } return true - l188: - position, tokenIndex, depth = position188, tokenIndex188, depth188 + l196: + position, tokenIndex, depth = position196, tokenIndex196, depth196 return false }, - /* 49 Assignment <- <(Expression '=' Expression)> */ + /* 51 Assignment <- <(Expression '=' Expression)> */ func() bool { - position192, tokenIndex192, depth192 := position, tokenIndex, depth + position200, tokenIndex200, depth200 := position, tokenIndex, depth { - position193 := position + position201 := position depth++ if !_rules[ruleExpression]() { - goto l192 + goto l200 } if buffer[position] != rune('=') { - goto l192 + goto l200 } position++ if !_rules[ruleExpression]() { - goto l192 + goto l200 } depth-- - add(ruleAssignment, position193) + add(ruleAssignment, position201) } return true - l192: - position, tokenIndex, depth = position192, tokenIndex192, depth192 + l200: + position, tokenIndex, depth = position200, tokenIndex200, depth200 return false }, - /* 50 Merge <- <(RefMerge / SimpleMerge)> */ + /* 52 Merge <- <(RefMerge / SimpleMerge)> */ func() bool { - position194, tokenIndex194, depth194 := position, tokenIndex, depth + position202, tokenIndex202, depth202 := position, tokenIndex, depth { - position195 := position + position203 := 
position depth++ { - position196, tokenIndex196, depth196 := position, tokenIndex, depth + position204, tokenIndex204, depth204 := position, tokenIndex, depth if !_rules[ruleRefMerge]() { - goto l197 + goto l205 } - goto l196 - l197: - position, tokenIndex, depth = position196, tokenIndex196, depth196 + goto l204 + l205: + position, tokenIndex, depth = position204, tokenIndex204, depth204 if !_rules[ruleSimpleMerge]() { - goto l194 + goto l202 } } - l196: + l204: depth-- - add(ruleMerge, position195) + add(ruleMerge, position203) } return true - l194: - position, tokenIndex, depth = position194, tokenIndex194, depth194 + l202: + position, tokenIndex, depth = position202, tokenIndex202, depth202 return false }, - /* 51 RefMerge <- <('m' 'e' 'r' 'g' 'e' !(req_ws Required) (req_ws (Replace / On))? req_ws Reference)> */ + /* 53 RefMerge <- <('m' 'e' 'r' 'g' 'e' !(req_ws Required) (req_ws (Replace / On))? req_ws Reference)> */ func() bool { - position198, tokenIndex198, depth198 := position, tokenIndex, depth + position206, tokenIndex206, depth206 := position, tokenIndex, depth { - position199 := position + position207 := position depth++ if buffer[position] != rune('m') { - goto l198 + goto l206 } position++ if buffer[position] != rune('e') { - goto l198 + goto l206 } position++ if buffer[position] != rune('r') { - goto l198 + goto l206 } position++ if buffer[position] != rune('g') { - goto l198 + goto l206 } position++ if buffer[position] != rune('e') { - goto l198 + goto l206 } position++ { - position200, tokenIndex200, depth200 := position, tokenIndex, depth + position208, tokenIndex208, depth208 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l200 + goto l208 } if !_rules[ruleRequired]() { - goto l200 + goto l208 } - goto l198 - l200: - position, tokenIndex, depth = position200, tokenIndex200, depth200 + goto l206 + l208: + position, tokenIndex, depth = position208, tokenIndex208, depth208 } { - position201, tokenIndex201, depth201 := position, 
tokenIndex, depth + position209, tokenIndex209, depth209 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l201 + goto l209 } { - position203, tokenIndex203, depth203 := position, tokenIndex, depth + position211, tokenIndex211, depth211 := position, tokenIndex, depth if !_rules[ruleReplace]() { - goto l204 + goto l212 } - goto l203 - l204: - position, tokenIndex, depth = position203, tokenIndex203, depth203 + goto l211 + l212: + position, tokenIndex, depth = position211, tokenIndex211, depth211 if !_rules[ruleOn]() { - goto l201 + goto l209 } } - l203: - goto l202 - l201: - position, tokenIndex, depth = position201, tokenIndex201, depth201 + l211: + goto l210 + l209: + position, tokenIndex, depth = position209, tokenIndex209, depth209 } - l202: + l210: if !_rules[rulereq_ws]() { - goto l198 + goto l206 } if !_rules[ruleReference]() { - goto l198 + goto l206 } depth-- - add(ruleRefMerge, position199) + add(ruleRefMerge, position207) } return true - l198: - position, tokenIndex, depth = position198, tokenIndex198, depth198 + l206: + position, tokenIndex, depth = position206, tokenIndex206, depth206 return false }, - /* 52 SimpleMerge <- <('m' 'e' 'r' 'g' 'e' !'(' (req_ws (Replace / Required / On))?)> */ + /* 54 SimpleMerge <- <('m' 'e' 'r' 'g' 'e' !'(' (req_ws (Replace / Required / On))?)> */ func() bool { - position205, tokenIndex205, depth205 := position, tokenIndex, depth + position213, tokenIndex213, depth213 := position, tokenIndex, depth { - position206 := position + position214 := position depth++ if buffer[position] != rune('m') { - goto l205 + goto l213 } position++ if buffer[position] != rune('e') { - goto l205 + goto l213 } position++ if buffer[position] != rune('r') { - goto l205 + goto l213 } position++ if buffer[position] != rune('g') { - goto l205 + goto l213 } position++ if buffer[position] != rune('e') { - goto l205 + goto l213 } position++ { - position207, tokenIndex207, depth207 := position, tokenIndex, depth + position215, 
tokenIndex215, depth215 := position, tokenIndex, depth if buffer[position] != rune('(') { - goto l207 + goto l215 } position++ - goto l205 - l207: - position, tokenIndex, depth = position207, tokenIndex207, depth207 + goto l213 + l215: + position, tokenIndex, depth = position215, tokenIndex215, depth215 } { - position208, tokenIndex208, depth208 := position, tokenIndex, depth + position216, tokenIndex216, depth216 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l208 + goto l216 } { - position210, tokenIndex210, depth210 := position, tokenIndex, depth + position218, tokenIndex218, depth218 := position, tokenIndex, depth if !_rules[ruleReplace]() { - goto l211 + goto l219 } - goto l210 - l211: - position, tokenIndex, depth = position210, tokenIndex210, depth210 + goto l218 + l219: + position, tokenIndex, depth = position218, tokenIndex218, depth218 if !_rules[ruleRequired]() { - goto l212 + goto l220 } - goto l210 - l212: - position, tokenIndex, depth = position210, tokenIndex210, depth210 + goto l218 + l220: + position, tokenIndex, depth = position218, tokenIndex218, depth218 if !_rules[ruleOn]() { - goto l208 + goto l216 } } - l210: - goto l209 - l208: - position, tokenIndex, depth = position208, tokenIndex208, depth208 + l218: + goto l217 + l216: + position, tokenIndex, depth = position216, tokenIndex216, depth216 } - l209: + l217: depth-- - add(ruleSimpleMerge, position206) + add(ruleSimpleMerge, position214) } return true - l205: - position, tokenIndex, depth = position205, tokenIndex205, depth205 + l213: + position, tokenIndex, depth = position213, tokenIndex213, depth213 return false }, - /* 53 Replace <- <('r' 'e' 'p' 'l' 'a' 'c' 'e')> */ + /* 55 Replace <- <('r' 'e' 'p' 'l' 'a' 'c' 'e')> */ func() bool { - position213, tokenIndex213, depth213 := position, tokenIndex, depth + position221, tokenIndex221, depth221 := position, tokenIndex, depth { - position214 := position + position222 := position depth++ if buffer[position] != rune('r') { - 
goto l213 + goto l221 } position++ if buffer[position] != rune('e') { - goto l213 + goto l221 } position++ if buffer[position] != rune('p') { - goto l213 + goto l221 } position++ if buffer[position] != rune('l') { - goto l213 + goto l221 } position++ if buffer[position] != rune('a') { - goto l213 + goto l221 } position++ if buffer[position] != rune('c') { - goto l213 + goto l221 } position++ if buffer[position] != rune('e') { - goto l213 + goto l221 } position++ depth-- - add(ruleReplace, position214) + add(ruleReplace, position222) } return true - l213: - position, tokenIndex, depth = position213, tokenIndex213, depth213 + l221: + position, tokenIndex, depth = position221, tokenIndex221, depth221 return false }, - /* 54 Required <- <('r' 'e' 'q' 'u' 'i' 'r' 'e' 'd')> */ + /* 56 Required <- <('r' 'e' 'q' 'u' 'i' 'r' 'e' 'd')> */ func() bool { - position215, tokenIndex215, depth215 := position, tokenIndex, depth + position223, tokenIndex223, depth223 := position, tokenIndex, depth { - position216 := position + position224 := position depth++ if buffer[position] != rune('r') { - goto l215 + goto l223 } position++ if buffer[position] != rune('e') { - goto l215 + goto l223 } position++ if buffer[position] != rune('q') { - goto l215 + goto l223 } position++ if buffer[position] != rune('u') { - goto l215 + goto l223 } position++ if buffer[position] != rune('i') { - goto l215 + goto l223 } position++ if buffer[position] != rune('r') { - goto l215 + goto l223 } position++ if buffer[position] != rune('e') { - goto l215 + goto l223 } position++ if buffer[position] != rune('d') { - goto l215 + goto l223 } position++ depth-- - add(ruleRequired, position216) + add(ruleRequired, position224) } return true - l215: - position, tokenIndex, depth = position215, tokenIndex215, depth215 + l223: + position, tokenIndex, depth = position223, tokenIndex223, depth223 return false }, - /* 55 On <- <('o' 'n' req_ws Name)> */ + /* 57 On <- <('o' 'n' req_ws Name)> */ func() bool { - 
position217, tokenIndex217, depth217 := position, tokenIndex, depth + position225, tokenIndex225, depth225 := position, tokenIndex, depth { - position218 := position + position226 := position depth++ if buffer[position] != rune('o') { - goto l217 + goto l225 } position++ if buffer[position] != rune('n') { - goto l217 + goto l225 } position++ if !_rules[rulereq_ws]() { - goto l217 + goto l225 } if !_rules[ruleName]() { - goto l217 + goto l225 } depth-- - add(ruleOn, position218) + add(ruleOn, position226) } return true - l217: - position, tokenIndex, depth = position217, tokenIndex217, depth217 + l225: + position, tokenIndex, depth = position225, tokenIndex225, depth225 return false }, - /* 56 Auto <- <('a' 'u' 't' 'o')> */ + /* 58 Auto <- <('a' 'u' 't' 'o')> */ func() bool { - position219, tokenIndex219, depth219 := position, tokenIndex, depth + position227, tokenIndex227, depth227 := position, tokenIndex, depth { - position220 := position + position228 := position depth++ if buffer[position] != rune('a') { - goto l219 + goto l227 } position++ if buffer[position] != rune('u') { - goto l219 + goto l227 } position++ if buffer[position] != rune('t') { - goto l219 + goto l227 } position++ if buffer[position] != rune('o') { - goto l219 + goto l227 } position++ depth-- - add(ruleAuto, position220) + add(ruleAuto, position228) } return true - l219: - position, tokenIndex, depth = position219, tokenIndex219, depth219 + l227: + position, tokenIndex, depth = position227, tokenIndex227, depth227 return false }, - /* 57 Mapping <- <('m' 'a' 'p' '[' Level7 (LambdaExpr / ('|' Expression)) ']')> */ + /* 59 Mapping <- <('m' 'a' 'p' '[' Level7 (LambdaExpr / ('|' Expression)) ']')> */ func() bool { - position221, tokenIndex221, depth221 := position, tokenIndex, depth + position229, tokenIndex229, depth229 := position, tokenIndex, depth { - position222 := position + position230 := position depth++ if buffer[position] != rune('m') { - goto l221 + goto l229 } position++ if 
buffer[position] != rune('a') { - goto l221 + goto l229 } position++ if buffer[position] != rune('p') { - goto l221 + goto l229 } position++ if buffer[position] != rune('[') { - goto l221 + goto l229 } position++ if !_rules[ruleLevel7]() { - goto l221 + goto l229 } { - position223, tokenIndex223, depth223 := position, tokenIndex, depth + position231, tokenIndex231, depth231 := position, tokenIndex, depth if !_rules[ruleLambdaExpr]() { - goto l224 + goto l232 } - goto l223 - l224: - position, tokenIndex, depth = position223, tokenIndex223, depth223 + goto l231 + l232: + position, tokenIndex, depth = position231, tokenIndex231, depth231 if buffer[position] != rune('|') { - goto l221 + goto l229 } position++ if !_rules[ruleExpression]() { - goto l221 + goto l229 } } - l223: + l231: if buffer[position] != rune(']') { - goto l221 + goto l229 } position++ depth-- - add(ruleMapping, position222) + add(ruleMapping, position230) } return true - l221: - position, tokenIndex, depth = position221, tokenIndex221, depth221 + l229: + position, tokenIndex, depth = position229, tokenIndex229, depth229 return false }, - /* 58 Sum <- <('s' 'u' 'm' '[' Level7 '|' Level7 (LambdaExpr / ('|' Expression)) ']')> */ + /* 60 Sum <- <('s' 'u' 'm' '[' Level7 '|' Level7 (LambdaExpr / ('|' Expression)) ']')> */ func() bool { - position225, tokenIndex225, depth225 := position, tokenIndex, depth + position233, tokenIndex233, depth233 := position, tokenIndex, depth { - position226 := position + position234 := position depth++ if buffer[position] != rune('s') { - goto l225 + goto l233 } position++ if buffer[position] != rune('u') { - goto l225 + goto l233 } position++ if buffer[position] != rune('m') { - goto l225 + goto l233 } position++ if buffer[position] != rune('[') { - goto l225 + goto l233 } position++ if !_rules[ruleLevel7]() { - goto l225 + goto l233 } if buffer[position] != rune('|') { - goto l225 + goto l233 } position++ if !_rules[ruleLevel7]() { - goto l225 + goto l233 } { - 
position227, tokenIndex227, depth227 := position, tokenIndex, depth + position235, tokenIndex235, depth235 := position, tokenIndex, depth if !_rules[ruleLambdaExpr]() { - goto l228 + goto l236 } - goto l227 - l228: - position, tokenIndex, depth = position227, tokenIndex227, depth227 + goto l235 + l236: + position, tokenIndex, depth = position235, tokenIndex235, depth235 if buffer[position] != rune('|') { - goto l225 + goto l233 } position++ if !_rules[ruleExpression]() { - goto l225 + goto l233 } } - l227: + l235: if buffer[position] != rune(']') { - goto l225 + goto l233 } position++ depth-- - add(ruleSum, position226) + add(ruleSum, position234) } return true - l225: - position, tokenIndex, depth = position225, tokenIndex225, depth225 + l233: + position, tokenIndex, depth = position233, tokenIndex233, depth233 return false }, - /* 59 Lambda <- <('l' 'a' 'm' 'b' 'd' 'a' (LambdaRef / LambdaExpr))> */ + /* 61 Lambda <- <('l' 'a' 'm' 'b' 'd' 'a' (LambdaRef / LambdaExpr))> */ func() bool { - position229, tokenIndex229, depth229 := position, tokenIndex, depth + position237, tokenIndex237, depth237 := position, tokenIndex, depth { - position230 := position + position238 := position depth++ if buffer[position] != rune('l') { - goto l229 + goto l237 } position++ if buffer[position] != rune('a') { - goto l229 + goto l237 } position++ if buffer[position] != rune('m') { - goto l229 + goto l237 } position++ if buffer[position] != rune('b') { - goto l229 + goto l237 } position++ if buffer[position] != rune('d') { - goto l229 + goto l237 } position++ if buffer[position] != rune('a') { - goto l229 + goto l237 } position++ { - position231, tokenIndex231, depth231 := position, tokenIndex, depth + position239, tokenIndex239, depth239 := position, tokenIndex, depth if !_rules[ruleLambdaRef]() { - goto l232 + goto l240 } - goto l231 - l232: - position, tokenIndex, depth = position231, tokenIndex231, depth231 + goto l239 + l240: + position, tokenIndex, depth = position239, 
tokenIndex239, depth239 if !_rules[ruleLambdaExpr]() { - goto l229 + goto l237 } } - l231: + l239: depth-- - add(ruleLambda, position230) + add(ruleLambda, position238) } return true - l229: - position, tokenIndex, depth = position229, tokenIndex229, depth229 + l237: + position, tokenIndex, depth = position237, tokenIndex237, depth237 return false }, - /* 60 LambdaRef <- <(req_ws Expression)> */ + /* 62 LambdaRef <- <(req_ws Expression)> */ func() bool { - position233, tokenIndex233, depth233 := position, tokenIndex, depth + position241, tokenIndex241, depth241 := position, tokenIndex, depth { - position234 := position + position242 := position depth++ if !_rules[rulereq_ws]() { - goto l233 + goto l241 } if !_rules[ruleExpression]() { - goto l233 + goto l241 } depth-- - add(ruleLambdaRef, position234) + add(ruleLambdaRef, position242) } return true - l233: - position, tokenIndex, depth = position233, tokenIndex233, depth233 + l241: + position, tokenIndex, depth = position241, tokenIndex241, depth241 return false }, - /* 61 LambdaExpr <- <(ws '|' ws Name NextName* ws '|' ws ('-' '>') Expression)> */ + /* 63 LambdaExpr <- <(ws '|' ws Name NextName* ws '|' ws ('-' '>') Expression)> */ func() bool { - position235, tokenIndex235, depth235 := position, tokenIndex, depth + position243, tokenIndex243, depth243 := position, tokenIndex, depth { - position236 := position + position244 := position depth++ if !_rules[rulews]() { - goto l235 + goto l243 } if buffer[position] != rune('|') { - goto l235 + goto l243 } position++ if !_rules[rulews]() { - goto l235 + goto l243 } if !_rules[ruleName]() { - goto l235 + goto l243 } - l237: + l245: { - position238, tokenIndex238, depth238 := position, tokenIndex, depth + position246, tokenIndex246, depth246 := position, tokenIndex, depth if !_rules[ruleNextName]() { - goto l238 + goto l246 } - goto l237 - l238: - position, tokenIndex, depth = position238, tokenIndex238, depth238 + goto l245 + l246: + position, tokenIndex, depth = 
position246, tokenIndex246, depth246 } if !_rules[rulews]() { - goto l235 + goto l243 } if buffer[position] != rune('|') { - goto l235 + goto l243 } position++ if !_rules[rulews]() { - goto l235 + goto l243 } if buffer[position] != rune('-') { - goto l235 + goto l243 } position++ if buffer[position] != rune('>') { - goto l235 + goto l243 } position++ if !_rules[ruleExpression]() { - goto l235 + goto l243 } depth-- - add(ruleLambdaExpr, position236) + add(ruleLambdaExpr, position244) } return true - l235: - position, tokenIndex, depth = position235, tokenIndex235, depth235 + l243: + position, tokenIndex, depth = position243, tokenIndex243, depth243 return false }, - /* 62 NextName <- <(ws ',' ws Name)> */ + /* 64 NextName <- <(ws ',' ws Name)> */ func() bool { - position239, tokenIndex239, depth239 := position, tokenIndex, depth + position247, tokenIndex247, depth247 := position, tokenIndex, depth { - position240 := position + position248 := position depth++ if !_rules[rulews]() { - goto l239 + goto l247 } if buffer[position] != rune(',') { - goto l239 + goto l247 } position++ if !_rules[rulews]() { - goto l239 + goto l247 } if !_rules[ruleName]() { - goto l239 + goto l247 } depth-- - add(ruleNextName, position240) + add(ruleNextName, position248) } return true - l239: - position, tokenIndex, depth = position239, tokenIndex239, depth239 + l247: + position, tokenIndex, depth = position247, tokenIndex247, depth247 return false }, - /* 63 Name <- <([a-z] / [A-Z] / [0-9] / '_')+> */ + /* 65 Name <- <([a-z] / [A-Z] / [0-9] / '_')+> */ func() bool { - position241, tokenIndex241, depth241 := position, tokenIndex, depth + position249, tokenIndex249, depth249 := position, tokenIndex, depth { - position242 := position + position250 := position depth++ { - position245, tokenIndex245, depth245 := position, tokenIndex, depth + position253, tokenIndex253, depth253 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l246 + goto l254 } 
position++ - goto l245 - l246: - position, tokenIndex, depth = position245, tokenIndex245, depth245 + goto l253 + l254: + position, tokenIndex, depth = position253, tokenIndex253, depth253 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l247 + goto l255 } position++ - goto l245 - l247: - position, tokenIndex, depth = position245, tokenIndex245, depth245 + goto l253 + l255: + position, tokenIndex, depth = position253, tokenIndex253, depth253 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l248 + goto l256 } position++ - goto l245 - l248: - position, tokenIndex, depth = position245, tokenIndex245, depth245 + goto l253 + l256: + position, tokenIndex, depth = position253, tokenIndex253, depth253 if buffer[position] != rune('_') { - goto l241 + goto l249 } position++ } - l245: - l243: + l253: + l251: { - position244, tokenIndex244, depth244 := position, tokenIndex, depth + position252, tokenIndex252, depth252 := position, tokenIndex, depth { - position249, tokenIndex249, depth249 := position, tokenIndex, depth + position257, tokenIndex257, depth257 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l250 + goto l258 } position++ - goto l249 - l250: - position, tokenIndex, depth = position249, tokenIndex249, depth249 + goto l257 + l258: + position, tokenIndex, depth = position257, tokenIndex257, depth257 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l251 + goto l259 } position++ - goto l249 - l251: - position, tokenIndex, depth = position249, tokenIndex249, depth249 + goto l257 + l259: + position, tokenIndex, depth = position257, tokenIndex257, depth257 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l252 + goto l260 } position++ - goto l249 - l252: - position, tokenIndex, depth = position249, tokenIndex249, depth249 + goto l257 + l260: + position, tokenIndex, depth = position257, tokenIndex257, depth257 if buffer[position] != rune('_') { - goto l244 + 
goto l252 } position++ } - l249: - goto l243 - l244: - position, tokenIndex, depth = position244, tokenIndex244, depth244 + l257: + goto l251 + l252: + position, tokenIndex, depth = position252, tokenIndex252, depth252 } depth-- - add(ruleName, position242) + add(ruleName, position250) } return true - l241: - position, tokenIndex, depth = position241, tokenIndex241, depth241 + l249: + position, tokenIndex, depth = position249, tokenIndex249, depth249 return false }, - /* 64 Reference <- <('.'? Key FollowUpRef)> */ + /* 66 Reference <- <('.'? Key FollowUpRef)> */ func() bool { - position253, tokenIndex253, depth253 := position, tokenIndex, depth + position261, tokenIndex261, depth261 := position, tokenIndex, depth { - position254 := position + position262 := position depth++ { - position255, tokenIndex255, depth255 := position, tokenIndex, depth + position263, tokenIndex263, depth263 := position, tokenIndex, depth if buffer[position] != rune('.') { - goto l255 + goto l263 } position++ - goto l256 - l255: - position, tokenIndex, depth = position255, tokenIndex255, depth255 + goto l264 + l263: + position, tokenIndex, depth = position263, tokenIndex263, depth263 } - l256: + l264: if !_rules[ruleKey]() { - goto l253 + goto l261 } if !_rules[ruleFollowUpRef]() { - goto l253 + goto l261 } depth-- - add(ruleReference, position254) + add(ruleReference, position262) } return true - l253: - position, tokenIndex, depth = position253, tokenIndex253, depth253 + l261: + position, tokenIndex, depth = position261, tokenIndex261, depth261 return false }, - /* 65 FollowUpRef <- <('.' (Key / Index))*> */ + /* 67 FollowUpRef <- <('.' 
(Key / Index))*> */ func() bool { { - position258 := position + position266 := position depth++ - l259: + l267: { - position260, tokenIndex260, depth260 := position, tokenIndex, depth + position268, tokenIndex268, depth268 := position, tokenIndex, depth if buffer[position] != rune('.') { - goto l260 + goto l268 } position++ { - position261, tokenIndex261, depth261 := position, tokenIndex, depth + position269, tokenIndex269, depth269 := position, tokenIndex, depth if !_rules[ruleKey]() { - goto l262 + goto l270 } - goto l261 - l262: - position, tokenIndex, depth = position261, tokenIndex261, depth261 + goto l269 + l270: + position, tokenIndex, depth = position269, tokenIndex269, depth269 if !_rules[ruleIndex]() { - goto l260 + goto l268 } } - l261: - goto l259 - l260: - position, tokenIndex, depth = position260, tokenIndex260, depth260 + l269: + goto l267 + l268: + position, tokenIndex, depth = position268, tokenIndex268, depth268 } depth-- - add(ruleFollowUpRef, position258) + add(ruleFollowUpRef, position266) } return true }, - /* 66 Key <- <(([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')* (':' ([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')*)?)> */ + /* 68 Key <- <(([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')* (':' ([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')*)?)> */ func() bool { - position263, tokenIndex263, depth263 := position, tokenIndex, depth + position271, tokenIndex271, depth271 := position, tokenIndex, depth { - position264 := position + position272 := position depth++ { - position265, tokenIndex265, depth265 := position, tokenIndex, depth + position273, tokenIndex273, depth273 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l266 + goto l274 } position++ - goto l265 - l266: - position, tokenIndex, depth = position265, tokenIndex265, depth265 + goto l273 + l274: + position, tokenIndex, depth = position273, tokenIndex273, 
depth273 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l267 + goto l275 } position++ - goto l265 - l267: - position, tokenIndex, depth = position265, tokenIndex265, depth265 + goto l273 + l275: + position, tokenIndex, depth = position273, tokenIndex273, depth273 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l268 + goto l276 } position++ - goto l265 - l268: - position, tokenIndex, depth = position265, tokenIndex265, depth265 + goto l273 + l276: + position, tokenIndex, depth = position273, tokenIndex273, depth273 if buffer[position] != rune('_') { - goto l263 + goto l271 } position++ } - l265: - l269: + l273: + l277: { - position270, tokenIndex270, depth270 := position, tokenIndex, depth + position278, tokenIndex278, depth278 := position, tokenIndex, depth { - position271, tokenIndex271, depth271 := position, tokenIndex, depth + position279, tokenIndex279, depth279 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l272 + goto l280 } position++ - goto l271 - l272: - position, tokenIndex, depth = position271, tokenIndex271, depth271 + goto l279 + l280: + position, tokenIndex, depth = position279, tokenIndex279, depth279 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l273 + goto l281 } position++ - goto l271 - l273: - position, tokenIndex, depth = position271, tokenIndex271, depth271 + goto l279 + l281: + position, tokenIndex, depth = position279, tokenIndex279, depth279 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l274 + goto l282 } position++ - goto l271 - l274: - position, tokenIndex, depth = position271, tokenIndex271, depth271 + goto l279 + l282: + position, tokenIndex, depth = position279, tokenIndex279, depth279 if buffer[position] != rune('_') { - goto l275 + goto l283 } position++ - goto l271 - l275: - position, tokenIndex, depth = position271, tokenIndex271, depth271 + goto l279 + l283: + position, tokenIndex, depth = position279, 
tokenIndex279, depth279 if buffer[position] != rune('-') { - goto l270 + goto l278 } position++ } - l271: - goto l269 - l270: - position, tokenIndex, depth = position270, tokenIndex270, depth270 + l279: + goto l277 + l278: + position, tokenIndex, depth = position278, tokenIndex278, depth278 } { - position276, tokenIndex276, depth276 := position, tokenIndex, depth + position284, tokenIndex284, depth284 := position, tokenIndex, depth if buffer[position] != rune(':') { - goto l276 + goto l284 } position++ { - position278, tokenIndex278, depth278 := position, tokenIndex, depth + position286, tokenIndex286, depth286 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l279 + goto l287 } position++ - goto l278 - l279: - position, tokenIndex, depth = position278, tokenIndex278, depth278 + goto l286 + l287: + position, tokenIndex, depth = position286, tokenIndex286, depth286 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l280 + goto l288 } position++ - goto l278 - l280: - position, tokenIndex, depth = position278, tokenIndex278, depth278 + goto l286 + l288: + position, tokenIndex, depth = position286, tokenIndex286, depth286 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l281 + goto l289 } position++ - goto l278 - l281: - position, tokenIndex, depth = position278, tokenIndex278, depth278 + goto l286 + l289: + position, tokenIndex, depth = position286, tokenIndex286, depth286 if buffer[position] != rune('_') { - goto l276 + goto l284 } position++ } - l278: - l282: + l286: + l290: { - position283, tokenIndex283, depth283 := position, tokenIndex, depth + position291, tokenIndex291, depth291 := position, tokenIndex, depth { - position284, tokenIndex284, depth284 := position, tokenIndex, depth + position292, tokenIndex292, depth292 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l285 + goto l293 } position++ - goto l284 - l285: - position, 
tokenIndex, depth = position284, tokenIndex284, depth284 + goto l292 + l293: + position, tokenIndex, depth = position292, tokenIndex292, depth292 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l286 + goto l294 } position++ - goto l284 - l286: - position, tokenIndex, depth = position284, tokenIndex284, depth284 + goto l292 + l294: + position, tokenIndex, depth = position292, tokenIndex292, depth292 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l287 + goto l295 } position++ - goto l284 - l287: - position, tokenIndex, depth = position284, tokenIndex284, depth284 + goto l292 + l295: + position, tokenIndex, depth = position292, tokenIndex292, depth292 if buffer[position] != rune('_') { - goto l288 + goto l296 } position++ - goto l284 - l288: - position, tokenIndex, depth = position284, tokenIndex284, depth284 + goto l292 + l296: + position, tokenIndex, depth = position292, tokenIndex292, depth292 if buffer[position] != rune('-') { - goto l283 + goto l291 } position++ } - l284: - goto l282 - l283: - position, tokenIndex, depth = position283, tokenIndex283, depth283 + l292: + goto l290 + l291: + position, tokenIndex, depth = position291, tokenIndex291, depth291 } - goto l277 - l276: - position, tokenIndex, depth = position276, tokenIndex276, depth276 + goto l285 + l284: + position, tokenIndex, depth = position284, tokenIndex284, depth284 } - l277: + l285: depth-- - add(ruleKey, position264) + add(ruleKey, position272) } return true - l263: - position, tokenIndex, depth = position263, tokenIndex263, depth263 + l271: + position, tokenIndex, depth = position271, tokenIndex271, depth271 return false }, - /* 67 Index <- <('[' [0-9]+ ']')> */ + /* 69 Index <- <('[' [0-9]+ ']')> */ func() bool { - position289, tokenIndex289, depth289 := position, tokenIndex, depth + position297, tokenIndex297, depth297 := position, tokenIndex, depth { - position290 := position + position298 := position depth++ if buffer[position] != rune('[') { - goto l289 
+ goto l297 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l289 + goto l297 } position++ - l291: + l299: { - position292, tokenIndex292, depth292 := position, tokenIndex, depth + position300, tokenIndex300, depth300 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l292 + goto l300 } position++ - goto l291 - l292: - position, tokenIndex, depth = position292, tokenIndex292, depth292 + goto l299 + l300: + position, tokenIndex, depth = position300, tokenIndex300, depth300 } if buffer[position] != rune(']') { - goto l289 + goto l297 } position++ depth-- - add(ruleIndex, position290) + add(ruleIndex, position298) } return true - l289: - position, tokenIndex, depth = position289, tokenIndex289, depth289 + l297: + position, tokenIndex, depth = position297, tokenIndex297, depth297 return false }, - /* 68 IP <- <([0-9]+ '.' [0-9]+ '.' [0-9]+ '.' [0-9]+)> */ + /* 70 IP <- <([0-9]+ '.' [0-9]+ '.' [0-9]+ '.' [0-9]+)> */ func() bool { - position293, tokenIndex293, depth293 := position, tokenIndex, depth + position301, tokenIndex301, depth301 := position, tokenIndex, depth { - position294 := position + position302 := position depth++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l293 + goto l301 } position++ - l295: + l303: { - position296, tokenIndex296, depth296 := position, tokenIndex, depth + position304, tokenIndex304, depth304 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l296 + goto l304 } position++ - goto l295 - l296: - position, tokenIndex, depth = position296, tokenIndex296, depth296 + goto l303 + l304: + position, tokenIndex, depth = position304, tokenIndex304, depth304 } if buffer[position] != rune('.') { - goto l293 + goto l301 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l293 + goto l301 } position++ - l297: + l305: { - position298, tokenIndex298, depth298 := position, tokenIndex, 
depth + position306, tokenIndex306, depth306 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l298 + goto l306 } position++ - goto l297 - l298: - position, tokenIndex, depth = position298, tokenIndex298, depth298 + goto l305 + l306: + position, tokenIndex, depth = position306, tokenIndex306, depth306 } if buffer[position] != rune('.') { - goto l293 + goto l301 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l293 + goto l301 } position++ - l299: + l307: { - position300, tokenIndex300, depth300 := position, tokenIndex, depth + position308, tokenIndex308, depth308 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l300 + goto l308 } position++ - goto l299 - l300: - position, tokenIndex, depth = position300, tokenIndex300, depth300 + goto l307 + l308: + position, tokenIndex, depth = position308, tokenIndex308, depth308 } if buffer[position] != rune('.') { - goto l293 + goto l301 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l293 + goto l301 } position++ - l301: + l309: { - position302, tokenIndex302, depth302 := position, tokenIndex, depth + position310, tokenIndex310, depth310 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l302 + goto l310 } position++ - goto l301 - l302: - position, tokenIndex, depth = position302, tokenIndex302, depth302 + goto l309 + l310: + position, tokenIndex, depth = position310, tokenIndex310, depth310 } depth-- - add(ruleIP, position294) + add(ruleIP, position302) } return true - l293: - position, tokenIndex, depth = position293, tokenIndex293, depth293 + l301: + position, tokenIndex, depth = position301, tokenIndex301, depth301 return false }, - /* 69 ws <- <(' ' / '\t' / '\n' / '\r')*> */ + /* 71 ws <- <(' ' / '\t' / '\n' / '\r')*> */ func() bool { { - position304 := position + position312 := position depth++ - l305: + l313: { - 
position306, tokenIndex306, depth306 := position, tokenIndex, depth + position314, tokenIndex314, depth314 := position, tokenIndex, depth { - position307, tokenIndex307, depth307 := position, tokenIndex, depth + position315, tokenIndex315, depth315 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l308 + goto l316 } position++ - goto l307 - l308: - position, tokenIndex, depth = position307, tokenIndex307, depth307 + goto l315 + l316: + position, tokenIndex, depth = position315, tokenIndex315, depth315 if buffer[position] != rune('\t') { - goto l309 + goto l317 } position++ - goto l307 - l309: - position, tokenIndex, depth = position307, tokenIndex307, depth307 + goto l315 + l317: + position, tokenIndex, depth = position315, tokenIndex315, depth315 if buffer[position] != rune('\n') { - goto l310 + goto l318 } position++ - goto l307 - l310: - position, tokenIndex, depth = position307, tokenIndex307, depth307 + goto l315 + l318: + position, tokenIndex, depth = position315, tokenIndex315, depth315 if buffer[position] != rune('\r') { - goto l306 + goto l314 } position++ } - l307: - goto l305 - l306: - position, tokenIndex, depth = position306, tokenIndex306, depth306 + l315: + goto l313 + l314: + position, tokenIndex, depth = position314, tokenIndex314, depth314 } depth-- - add(rulews, position304) + add(rulews, position312) } return true }, - /* 70 req_ws <- <(' ' / '\t' / '\n' / '\r')+> */ + /* 72 req_ws <- <(' ' / '\t' / '\n' / '\r')+> */ func() bool { - position311, tokenIndex311, depth311 := position, tokenIndex, depth + position319, tokenIndex319, depth319 := position, tokenIndex, depth { - position312 := position + position320 := position depth++ { - position315, tokenIndex315, depth315 := position, tokenIndex, depth + position323, tokenIndex323, depth323 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l316 + goto l324 } position++ - goto l315 - l316: - position, tokenIndex, depth = position315, tokenIndex315, 
depth315 + goto l323 + l324: + position, tokenIndex, depth = position323, tokenIndex323, depth323 if buffer[position] != rune('\t') { - goto l317 + goto l325 } position++ - goto l315 - l317: - position, tokenIndex, depth = position315, tokenIndex315, depth315 + goto l323 + l325: + position, tokenIndex, depth = position323, tokenIndex323, depth323 if buffer[position] != rune('\n') { - goto l318 + goto l326 } position++ - goto l315 - l318: - position, tokenIndex, depth = position315, tokenIndex315, depth315 + goto l323 + l326: + position, tokenIndex, depth = position323, tokenIndex323, depth323 if buffer[position] != rune('\r') { - goto l311 + goto l319 } position++ } - l315: - l313: + l323: + l321: { - position314, tokenIndex314, depth314 := position, tokenIndex, depth + position322, tokenIndex322, depth322 := position, tokenIndex, depth { - position319, tokenIndex319, depth319 := position, tokenIndex, depth + position327, tokenIndex327, depth327 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l320 + goto l328 } position++ - goto l319 - l320: - position, tokenIndex, depth = position319, tokenIndex319, depth319 + goto l327 + l328: + position, tokenIndex, depth = position327, tokenIndex327, depth327 if buffer[position] != rune('\t') { - goto l321 + goto l329 } position++ - goto l319 - l321: - position, tokenIndex, depth = position319, tokenIndex319, depth319 + goto l327 + l329: + position, tokenIndex, depth = position327, tokenIndex327, depth327 if buffer[position] != rune('\n') { - goto l322 + goto l330 } position++ - goto l319 - l322: - position, tokenIndex, depth = position319, tokenIndex319, depth319 + goto l327 + l330: + position, tokenIndex, depth = position327, tokenIndex327, depth327 if buffer[position] != rune('\r') { - goto l314 + goto l322 } position++ } - l319: - goto l313 - l314: - position, tokenIndex, depth = position314, tokenIndex314, depth314 + l327: + goto l321 + l322: + position, tokenIndex, depth = position322, 
tokenIndex322, depth322 } depth-- - add(rulereq_ws, position312) + add(rulereq_ws, position320) } return true - l311: - position, tokenIndex, depth = position311, tokenIndex311, depth311 + l319: + position, tokenIndex, depth = position319, tokenIndex319, depth319 return false }, + /* 74 Action0 <- <{}> */ + func() bool { + { + add(ruleAction0, position) + } + return true + }, } p.rules = _rules } diff --git a/dynaml/parser.go b/dynaml/parser.go index cf746e5..b50f845 100644 --- a/dynaml/parser.go +++ b/dynaml/parser.go @@ -122,6 +122,18 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E Arguments: tokens.GetExpressionList(), }) + case ruleAction0: + case ruleProjectionValue: + value := &ProjectionValue{} + tokens.Push(ProjectionValueExpr{value}) + tokens.Push(ProjectionValueExpr{value}) + + case ruleProjection: + qual := tokens.Pop() + value := tokens.Pop() + expr := tokens.Pop() + tokens.Push(ProjectionExpr{expr, value.(ProjectionValueExpr).Value, qual}) + case ruleInteger: val, err := strconv.ParseInt(contents, 10, 64) if err != nil { diff --git a/dynaml/projection.go b/dynaml/projection.go new file mode 100644 index 0000000..b52897d --- /dev/null +++ b/dynaml/projection.go @@ -0,0 +1,96 @@ +package dynaml + +import ( + "fmt" + "github.com/mandelsoft/spiff/debug" + "github.com/mandelsoft/spiff/yaml" +) + +type ProjectionExpr struct { + Expression Expression + Value *ProjectionValue + Projection Expression +} + +func (e ProjectionExpr) Evaluate(binding Binding, locally bool) (interface{}, EvaluationInfo, bool) { + resolved := true + value, infoa, ok := ResolveExpressionOrPushEvaluation(&e.Expression, &resolved, nil, binding, false) + if !ok { + return nil, infoa, false + } + if !resolved { + return e, infoa, false + } + switch v := value.(type) { + case []yaml.Node: + if _, ok := e.Projection.(ProjectionValueExpr); ok { + return v, infoa, true + } else { + newList := make([]yaml.Node, len(v)) + for index, entry := range v { + 
result, _, ok := projectValue(e.Value, entry, e.Projection, binding, locally) + if !ok { + return nil, infoa, false + } + if !isLocallyResolvedValue(newList[index]) { + return e, infoa, true + } + if !locally && !isResolvedValue(newList[index]) { + return e, infoa, true + } + newList[index] = node(result, binding) + } + return newList, infoa, true + } + case map[string]yaml.Node: + newList := make([]yaml.Node, len(v)) + index := 0 + for _, key := range getSortedKeys(v) { + result, _, ok := projectValue(e.Value, v[key], e.Projection, binding, locally) + if !ok { + return nil, infoa, false + } + if !isLocallyResolvedValue(newList[index]) { + return e, infoa, true + } + if !locally && !isResolvedValue(newList[index]) { + return e, infoa, true + } + newList[index] = node(result, binding) + index++ + } + return newList, infoa, true + default: + return infoa.Error("only map or list allowed for projection") + } +} + +func projectValue(ref *ProjectionValue, value yaml.Node, expr Expression, binding Binding, locally bool) (interface{}, EvaluationInfo, bool) { + ref.Value = value.Value() + root, info, ok := expr.Evaluate(binding, locally) + if !ok { + return nil, info, false + } + return root, info, true +} + +func (e ProjectionExpr) String() string { + return fmt.Sprintf("%s.[*] %s", e.Expression, e.Projection) +} + +type ProjectionValue struct { + Value interface{} +} + +type ProjectionValueExpr struct { + Value *ProjectionValue +} + +func (e ProjectionValueExpr) Evaluate(binding Binding, locally bool) (interface{}, EvaluationInfo, bool) { + debug.Debug("projection of value: %+v\n", e.Value.Value) + return e.Value.Value, DefaultInfo(), true +} + +func (e ProjectionValueExpr) String() string { + return "" +} diff --git a/flow/flow_test.go b/flow/flow_test.go index d920862..16732e0 100644 --- a/flow/flow_test.go +++ b/flow/flow_test.go @@ -5584,6 +5584,253 @@ data: }) }) + Describe("when projecting", func() { + Context("a list", func() { + It("it handles an identity 
projection", func() { + source := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue + +projection: (( .list.[*] )) +`) + resolved := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue +projection: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue +`) + Expect(source).To(FlowAs(resolved)) + }) + + It("it handles a field projection", func() { + source := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue + +projection: (( .list.[*].value )) +`) + resolved := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue +projection: + - aValue + - bValue + - cValue +`) + Expect(source).To(FlowAs(resolved)) + }) + + It("it handles a field projection for a slice", func() { + source := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue + +projection: (( .list.[1..2].value )) +`) + resolved := parseYAML(` +--- +list: + - name: a + value: aValue + - name: b + value: bValue + - name: c + value: cValue +projection: + - bValue + - cValue +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + + Context("a map", func() { + It("it handles a value projection", func() { + source := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: cValue + +projection: (( .map.[*] )) +`) + resolved := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: cValue +projection: + - name: b + value: bValue + - name: c + value: cValue + - name: a + value: aValue +`) + Expect(source).To(FlowAs(resolved)) + }) + + It("it handles a field value projection", func() { + source := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: 
cValue + +projection: (( .map.[*].value )) +`) + resolved := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: cValue +projection: + - bValue + - cValue + - aValue +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + + Context("in combination", func() { + It("it handles chained projections", func() { + source := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: cValue + +projection: (( (.map.[*]).[1..2] )) +`) + resolved := parseYAML(` +--- +map: + zz: + name: a + value: aValue + xx: + name: b + value: bValue + yy: + name: c + value: cValue +projection: + - name: c + value: cValue + - name: a + value: aValue +`) + Expect(source).To(FlowAs(resolved)) + }) + + It("it handles nested projections", func() { + source := parseYAML(` +--- +list: +- zz: + name: a + value: aValue +- xx: + name: b + value: bValue +- yy: + name: c + value: cValue + +projection: (( .list.[1..2].[*].value )) +`) + resolved := parseYAML(` +--- +list: +- zz: + name: a + value: aValue +- xx: + name: b + value: bValue +- yy: + name: c + value: cValue +projection: + - - bValue + - - cValue +`) + Expect(source).To(FlowAs(resolved)) + }) + }) + }) + Describe("when merging inline maps", func() { It("it overrides field", func() { source := parseYAML(` From a0917c368660c995b8034d5b03dc853586f57736 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Thu, 13 Jul 2017 12:40:25 +0200 Subject: [PATCH 03/13] import yaml documents --- README.md | 2 ++ dynaml/read.go | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/README.md b/README.md index dbe2cfc..32fd28d 100644 --- a/README.md +++ b/README.md @@ -1473,6 +1473,8 @@ A yaml document will be parsed and the tree is returned. The elements of the tr Additionally the yaml file may again contain dynaml expressions. All included dynaml expressions will be evaluated in the context of the reading expression. 
This means that the same file included at different places in a yaml document may result in different sub trees, depending on the used dynaml expressions. +If the read type is set to `import`, the file content is read as yaml document and the root node is used to substitute the expression. Potential dynaml expressions contained in the document will not be evaluated with the actual binding of the expression but as it would have been part of the original file. + #### text documents A text document will be returned as single string. diff --git a/dynaml/read.go b/dynaml/read.go index 8be0b4a..8e21e35 100644 --- a/dynaml/read.go +++ b/dynaml/read.go @@ -62,6 +62,13 @@ func func_read(arguments []interface{}, binding Binding) (interface{}, Evaluatio debug.Debug("resolving yaml file succeeded") info.Source = file return result.Value(), info, true + case "import": + node, err := yaml.Parse(file, data) + if err != nil { + return info.Error("error parsing stub [%s]: %s", path.Clean(file), err) + } + info.Source = file + return node.Value(), info, true case "text": info.Source = file From e8bc31b4fd57570563cdb125e7ceb153078f8765 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sat, 20 Jan 2018 09:43:37 +0100 Subject: [PATCH 04/13] explict expression group for marshal of expressions --- dynaml/grouped.go | 17 +++++++++++++++++ dynaml/parser.go | 3 ++- dynaml/parser_test.go | 6 +++--- 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 dynaml/grouped.go diff --git a/dynaml/grouped.go b/dynaml/grouped.go new file mode 100644 index 0000000..636967f --- /dev/null +++ b/dynaml/grouped.go @@ -0,0 +1,17 @@ +package dynaml + +import ( + "fmt" +) + +type GroupedExpr struct { + Expr Expression +} + +func (e GroupedExpr) String() string { + return fmt.Sprintf("( %s )", e.Expr) +} + +func (e GroupedExpr) Evaluate(binding Binding, locally bool) (interface{}, EvaluationInfo, bool) { + return e.Expr.Evaluate(binding, locally) +} diff --git a/dynaml/parser.go 
b/dynaml/parser.go index b50f845..720c88e 100644 --- a/dynaml/parser.go +++ b/dynaml/parser.go @@ -77,6 +77,8 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.Push(rhs) case rulePrefer: tokens.Push(PreferExpr{tokens.Pop()}) + case ruleGrouped: + tokens.Push(GroupedExpr{tokens.Pop()}) case ruleAuto: tokens.Push(AutoExpr{path}) case ruleMerge: @@ -288,7 +290,6 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.SetExpressionList(tokens.PopExpressionList()) case ruleKey, ruleIndex: - case ruleGrouped: case ruleLevel0, ruleLevel1, ruleLevel2, ruleLevel3, ruleLevel4, ruleLevel5, ruleLevel6, ruleLevel7: case ruleExpression: case ruleMap: diff --git a/dynaml/parser_test.go b/dynaml/parser_test.go index 676f062..eb2f86d 100644 --- a/dynaml/parser_test.go +++ b/dynaml/parser_test.go @@ -293,7 +293,7 @@ var _ = Describe("parsing", func() { parsesAs( `(foo)(1)`, CallExpr{ - ReferenceExpr{[]string{"foo"}}, + GroupedExpr{ReferenceExpr{[]string{"foo"}}}, []Expression{ IntegerExpr{1}, }, @@ -358,10 +358,10 @@ var _ = Describe("parsing", func() { parsesAs( `("foo" - bar) - merge`, SubtractionExpr{ - SubtractionExpr{ + GroupedExpr{SubtractionExpr{ StringExpr{"foo"}, ReferenceExpr{[]string{"bar"}}, - }, + }}, MergeExpr{}, }, ) From 89399ae555f383e4f78c68bd5cc8966e622a03aa Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sat, 20 Jan 2018 09:44:01 +0100 Subject: [PATCH 05/13] support of multi document templates --- README.md | 237 +++++++++++++++++++++++++++++++----------------- flow/cascade.go | 24 +++-- spiff++.go | 70 +++++++++----- spiff_test.go | 7 +- 4 files changed, 225 insertions(+), 113 deletions(-) diff --git a/README.md b/README.md index 32fd28d..0138fd9 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,19 @@ --- +*spiff* is a command line tool and declarative in-domain hybrid YAML templating system. 
While regular templating systems process a template file by substituting the template expressions by values taken from +external data sources, in-domain means that the templating engine knows about the syntax and structure of the processed template. It therefore can take the values for the template expressions directly +from the document processed, including those parts denoted by the template expressions itself. + +For example: +```yaml +resource: + name: bosh deployment + version: 25 + url: (( "http://resource.location/bosh?version=" version )) + description: (( "This document describes a " name " located at " url )) +``` + spiff is a command line tool and declarative YAML templating system, specially designed for generating BOSH deployment manifests. Contents: @@ -131,12 +144,23 @@ The ` merge` command offers the option `--partial`. If this option is given spif It is possible to read one file from standard input by using the file name `-`. It may be used only once. This allows using spiff as part of a pipeline to just process a single stream or to process a stream based on several templates/stubs. +The template file (first argument) may be a multi document file containing multiple YAML documents +separated by a line containing only `---`. Each YAML document will be +processed independently with the given stub files. + ### `spiff diff manifest.yml other-manifest.yml` Show structural differences between two deployment manifests. -Unlike 'bosh diff', this command has semantic knowledge of a deployment -manifest, and is not just text-based. It also doesn't modify either file. +Unlike basic diffing tools and even `bosh diff`, this command has semantic +knowledge of a deployment manifest, and is not just text-based. For example, +if two manifests are the same except they have some jobs listed in different +orders, `spiff diff` will detect this, since job order matters in a manifest. 
+On the other hand, if two manifests differ only in the order of their +resource pools, for instance, then it will yield and empty diff since +resource pool order doesn't actually matter for a deployment. + +Also unlike `bosh diff`, this command doesn't modify either file. It's tailed for checking differences between one deployment and the next. @@ -239,7 +263,7 @@ from: If the path starts with a dot (`.`) the path is always evaluated from the root of the document. -List entries consisting of a map with `name` field can directly be addressed +List entries consisting of a map with `name` field can directly be addressed by their name value. e.g.: @@ -327,7 +351,7 @@ String literal. The only escape character handled currently is '"'. ## `(( [ 1, 2, 3 ] ))` -List literal. The list elements might again be expressions. There is a special list literal `[1 .. -1]`, that can be used to resolve an increasing or descreasing number range to a list. +List literal. The list elements might again be expressions. There is a special list literal `[1 .. -1]`, that can be used to resolve an increasing or descreasing number range to a list. e.g.: @@ -351,7 +375,7 @@ the key and the value, might again be expressions, whereby the key expression mu evaluate to a string. This way it is possible to create maps with non-static keys. The assignment operator `=` has been chosen instead of the regular colon `:` character used in yaml, because this would result in conflicts with the yaml -syntax. +syntax. A map literal might consist of any number of field assignments separated by a comma `,`. @@ -425,7 +449,7 @@ Concatenation of maps as expression. Any sequences of maps can be concatenated, e.g.: ```yaml -foo: +foo: alice: 24 bob: 25 @@ -439,7 +463,7 @@ concat: (( foo bar )) yields ```yaml -foo: +foo: alice: 24 bob: 25 @@ -507,13 +531,13 @@ Merging of maps or lists with the content of the same element found in some stub ** Attention ** This form of `merge` has a compatibility propblem. 
In versions before 1.0.8, this expression -was never parsed, only the existence of the key `<<:` was relevant. Therefore there are often +was never parsed, only the existence of the key `<<:` was relevant. Therefore there are often usages of `<<: (( merge ))` where `<<: (( merge || nil ))` is meant. The first variant would require content in at least one stub (as always for the merge operator). Now this expression is evaluated correctly, but this would break existing manifest template sets, which use the first variant, but mean the second. Therfore this case is explicitly handled to describe an optional merge. If really a required merge is meant an additional explicit qualifier has to -be used (`(( merge required ))`). +be used (`(( merge required ))`). #### Merging maps @@ -734,7 +758,7 @@ Redirecting merges can be used as direct field value, also. They can be combined foo: a: 10 b: 20 - + bar: a: 1 b: 2 @@ -764,7 +788,7 @@ Another way doing a merge with another element in some stub could also be done t foo: a: 10 b: 20 - + bar: a: 1 b: 2 @@ -772,15 +796,15 @@ bar: **template.yml** ```yaml -bar: +bar: <<: (( merge )) b: 3 c: 4 - + foo: (( bar )) ``` -But in this scenario the merge still performs the deep merge with the original element name. Therefore +But in this scenario the merge still performs the deep merge with the original element name. Therefore `spiff merge template.yml values.yml` yields: ```yaml @@ -883,6 +907,39 @@ ip: 10.10.10.10 range: 10.10.10.10-10.11.11.1 ``` +Subtraction also works on two IP addresses to calculate the number of +IP addresses between two IP addresses. + +e.g.: + +```yaml +diff: (( 10.0.1.0 - 10.0.0.1 + 1 )) +``` + +yields the value 256. IP address constants can be directly used in dynaml +expressions. They are implicitly converted to strings and back to IP +addresses if required by an operation. + +Multiplication and division can be used to handle IP range shifts on CIDRs. +With division a network can be partioned. 
The network size is increased +to allow at least a dedicated number of subnets below the original CIDR. +Multiplication then can be used to get the n-th next subnet of the same +size. + +e.g.: + +```yaml +subnet: (( "10.1.2.1/24" / 12 )) # first subnet CIDR for 16 subnets +next: (( "10.1.2.1/24" / 12 * 2)) # 2nd next (3rd) subnet CIDRS +``` + +yields + +```yaml +subnet: 10.1.2.0/28 +next: 10.1.2.32/28 +``` + Additionally there are functions working on IPv4 CIDRs: ```yaml @@ -901,19 +958,6 @@ next: 192.168.1.0 num: 192.168.0.0+256=192.168.1.0 ``` -Subtraction also works on two IP addresses to calculate the number of -IP addresses between two IP addresses. - -e.g.: - -```yaml -diff: (( 10.0.1.0 - 10.0.0.1 + 1 )) -``` - -yields the value 256. IP address constants can be directly used in dynaml -expressions. They are implicitly converted to strings and back to IP -addresses if required by an operation. - ## `(( a > 1 ? foo :bar ))` Dynaml supports the comparison operators `<`, `<=`, `==`, `!=`, `>=` and `>`. The comparison operators work on @@ -940,7 +984,7 @@ The operators `-or` and `-and` can be used to combine comparison operators to co **Remark:** -The more traditional operator symbol `||` (and `&&`) cannot be used here, because the operator `||` already exists in dynaml with a different semantic, that does not hold for logical operations. The expression `false || true` evaluates to `false`, because it yields the first operand, if it is defined, regardless of its value. To be as compatible as possible this cannot be changed and the bare symbols `or` and `and` cannot be be used, because this would invalidate the concatenation of references with such names. +The more traditional operator symbol `||` (and `&&`) cannot be used here, because the operator `||` already exists in dynaml with a different semantic, that does not hold for logical operations. 
The expression `false || true` evaluates to `false`, because it yields the first operand, if it is defined, regardless of its value. To be as compatible as possible this cannot be changed and the bare symbols `or` and `and` cannot be be used, because this would invalidate the concatenation of references with such names. ## `(( 5 -or 6 ))` @@ -955,11 +999,11 @@ result: (( functionname(arg, arg, ...) )) ``` Additional functions may be defined as part of the yaml document using [lambda expressions](#-lambda-x-x--port-). The function name then is either a grouped expression or the path to the node hosting the lambda expression. - + ### `(( format( "%s %d", alice, 25) ))` Format a string based on arguments given by dynaml expressions. There is a second flavor of this function: `error` formats an error message and sets the evaluation to failed. - + ### `(( join( ", ", list) ))` @@ -1253,7 +1297,7 @@ list: length: 2 ``` -### `(( base64(string) ))` +### `(( base64(string) ))` The function `base64` generates a base64 encoding of a given string. `base64_decode` decodes a base64 encoded string. @@ -1271,7 +1315,7 @@ base54: dGVzdA== test: test ``` -### `(( md5(string) ))` +### `(( md5(string) ))` The function `md5` generates an md5 hash for the given string. @@ -1461,7 +1505,7 @@ Read the value of an environment variable whose name is given as dynaml expressi In a second flavor the function `env` accepts multiple arguments and/or list arguments, which are joined to a single list. Every entry in this list is used as name of an environment variable and the result of the function is a map of the given given variables as yaml element. Hereby non-existent environment variables are omitted. -### `(( read("file.yml") ))` +### `(( read("file.yml") ))` Read a file and return its content. There is support for two content types: `yaml` files and `text` files. If the file suffix is `.yml`, by default the yaml type is used. 
An optional second parameter can be used @@ -1471,7 +1515,7 @@ to explicitly specifiy the desired return type: `yaml` or `text`. A yaml document will be parsed and the tree is returned. The elements of the tree can be accessed by regular dynaml expressions. -Additionally the yaml file may again contain dynaml expressions. All included dynaml expressions will be evaluated in the context of the reading expression. This means that the same file included at different places in a yaml document may result in different sub trees, depending on the used dynaml expressions. +Additionally the yaml file may again contain dynaml expressions. All included dynaml expressions will be evaluated in the context of the reading expression. This means that the same file included at different places in a yaml document may result in different sub trees, depending on the used dynaml expressions. If the read type is set to `import`, the file content is read as yaml document and the root node is used to substitute the expression. Potential dynaml expressions contained in the document will not be evaluated with the actual binding of the expression but as it would have been part of the original file. @@ -1619,7 +1663,7 @@ networks: `static_ips`also accepts list arguments, as long as all transitivly contained elements are either again lists or integer values. This allows to abbreviate the list of IPs as follows: ``` - static_ips: (( static_ips([1..5]) )) + static_ips: (( static_ips([1..5]) )) ``` ### `(( ipset(ranges, 3, 3,4,5,6) ))` @@ -1628,7 +1672,7 @@ While the function [static_ips](#-static_ips0-1-3-) for historical reasons relies on the structure of a bosh manifest and works only at dedicated locations in the manifest, the function *ipset* offers a similar calculation purely based on its arguments. So, the available -ip ranges and the required numbers of IPs are passed as arguments. +ip ranges and the required numbers of IPs are passed as arguments. 
The first (ranges) argument can be a single range as a simple string or a list of strings. Every string might be @@ -1637,7 +1681,7 @@ list of strings. Every string might be - a CIDR The second argument specifies the requested number of IP addresses in the -result set. +result set. The additional arguments specify the indices of the IPs to choose (starting from 0) in the given ranges. Here again lists of indices might be used. @@ -1653,7 +1697,7 @@ ipset: (( ipset(ranges,3,[256..260]) )) resolves *ipset* to `[ 10.0.2.0, 10.0.2.1, 10.0.2.2 ]`. -If no IP indices are specified (only two arguments), the IPs are chosen +If no IP indices are specified (only two arguments), the IPs are chosen starting from the beginning of the first range up to the end of the last given range, without indirection. @@ -1693,7 +1737,7 @@ In combination with templates and lambda expressions this can be used to generat ### `(( makemap(fieldlist) ))` -In this flavor `makemap` creates a map with entries described by the given field list. +In this flavor `makemap` creates a map with entries described by the given field list. The list is expected to contain maps with the entries `key` and `value`, describing dedicated map entries. @@ -1703,7 +1747,7 @@ e.g.: list: - key: alice value: 24 - - key: bob + - key: bob value: 25 - key: 5 value: 25 @@ -1711,14 +1755,14 @@ list: map: (( makemap(list) )) ``` -yields +yields ```yaml list: - key: alice value: 24 - - key: bob + - key: bob value: 25 - key: 5 value: 25 @@ -1734,7 +1778,7 @@ If the key value is a boolean or an integer it will be mapped to a string. ### `(( makemap(key, value) ))` In this flavor `makemap` creates a map with entries described by the given argument -pairs. The arguments may be a sequence of key/values pairs (given by separate arguments). +pairs. The arguments may be a sequence of key/values pairs (given by separate arguments). 
e.g.: @@ -1742,7 +1786,7 @@ e.g.: map: (( makemap("peter", 23, "paul", 22) )) ``` -yields +yields ```yaml @@ -1756,7 +1800,7 @@ In contrast to the previous `makemap` flavor, this one could also be handled by ### `(( merge(map1, map2) ))` -Beside the keyword ` merge` there is also a function called `merge` (It must always be followed by an opensing bracket). It can be used to merge severals maps taken from the actual document. If the maps are specified by reference expressions, they cannot contain +Beside the keyword ` merge` there is also a function called `merge` (It must always be followed by an opening bracket). It can be used to merge severals maps taken from the actual document analogous to the stub merge process. If the maps are specified by reference expressions, they cannot contain any _dynaml_ expressions, because they are always evaluated in the context of the actual document before evaluating the arguments. e.g.: @@ -1764,7 +1808,7 @@ e.g.: ```yaml map1: alice: 24 - bob: 25 + bob: (( alice )) map2: alice: 26 peter: 8 @@ -1776,7 +1820,30 @@ resolves `result` to ```yaml result: alice: 26 - bob: 25 + bob: 24 # <---- expression evaluated before mergeing +``` + +Alternatively map [templates](#templates) can be passed (without evaluation operator!). In this case the _dynaml_ expressions from the template are evaluated while merging the given documents as for regular calls of _spiff merge_. + +e.g.: + +```yaml +map1: + <<: (( &template )) + alice: 24 + bob: (( alice )) +map2: + alice: 26 + peter: 8 +result: (( merge(map1,map2) )) +``` + +resolves `result` to + +```yaml +result: + alice: 26 + bob: 26 ``` A map might also be given by a map expression. Here it is possible to specify @@ -1996,10 +2063,10 @@ list: age: 25 - name: bob age: 24 - + ages: (( map[list|i,p|->i + 1 ". " p.name " is " p.age ] )) ``` - + yields ```yaml @@ -2008,7 +2075,7 @@ list: age: 25 - name: bob age: 24 - + ages: - 1. alice is 25 - 2. 
bob is 24 @@ -2080,10 +2147,10 @@ list: - 1 - 2 - 3 - + prod: (( sum[list|0|s,i,x|->s + i * x ] )) ``` - + yields ```yaml @@ -2091,7 +2158,7 @@ list: - 1 - 2 - 3 - + prod: 8 ``` @@ -2254,7 +2321,7 @@ foo: <<: (( &template )) alice: alice bob: (( verb " " alice )) - + use: subst: alice: alice @@ -2357,9 +2424,9 @@ The complete grammar can be found in [dynaml.peg](dynaml/dynaml.peg). # Structural Auto-Merge -By default `spiff` performs a deep structural merge of its first argument, the template file, with the given stub files. The merge is processed from right to left, providing an intermediate merged stub for every step. This means, that for every step all expressions must be locally resolvable. +By default `spiff` performs a deep structural merge of its first argument, the template file, with the given stub files. The merge is processed from right to left, providing an intermediate merged stub for every step. This means, that for every step all expressions must be locally resolvable. -Structural merge means, that besides explicit dynaml `merge` expressions, values will be overridden by values of equivalent nodes found in right-most stub files. In general, flat value lists are not merged. Only lists of maps can be merged by entries in a stub with a matching index. +Structural merge means, that besides explicit dynaml `merge` expressions, values will be overridden by values of equivalent nodes found in right-most stub files. In general, flat value lists are not merged. Only lists of maps can be merged by entries in a stub with a matching index. There is a special support for the auto-merge of lists containing maps, if the maps contain a `name` field. 
Hereby the list is handled like a map with @@ -2396,7 +2463,7 @@ list: and file **stub.yml**: ```yaml -foo: +foo: - name: bob bar: stub @@ -2457,7 +2524,7 @@ networks: (( merge )) **cf.yml** ```yaml -utils: (( merge )) +utils: (( merge )) network: (( merge )) meta: (( merge )) @@ -2500,7 +2567,7 @@ utils: ```yaml meta: deployment_no: 1 - + ``` will yield a network setting for a dedicated deployment @@ -2531,7 +2598,7 @@ Using a different `instance.yml` ```yaml meta: deployment_no: 0 - + ``` will yield a network setting for a second deployment providing the appropriate settings for a unique other IP block. @@ -2592,7 +2659,7 @@ networks: - _The auto merge never adds nodes to existing structures_ For example, merging - + **template.yml** ```yaml foo: @@ -2627,7 +2694,7 @@ networks: - _Simple node values are replaced by values or complete structures coming from stubs, structures are deep_ merged. For example, merging - + **template.yml** ```yaml foo: (( ["alice"] )) @@ -2636,40 +2703,40 @@ networks: **stub.yml** ```yaml - foo: + foo: - peter - paul - ``` + ``` yields ```yaml foo: - peter - - paul + - paul ``` But the template ```yaml - foo: [ (( "alice" )) ] + foo: [ (( "alice" )) ] ``` is merged without any change. - _Expressions are subject to be overridden as a whole_ - + A consequence of the behaviour described above is that nodes described by an expession are basically overridden by a complete merged structure, instead of doing a deep merge with the structues resulting from the expression evaluation. 
For example, merging - + **template.yml** ```yaml men: - bob: 24 women: - alice: 25 - + people: (( women men )) ``` with @@ -2686,7 +2753,7 @@ networks: - bob: 24 women: - alice: 25 - + people: - alice: 24 ``` @@ -2698,7 +2765,7 @@ networks: - bob: 24 women: - alice: 25 - + people: - alice: 24 - bob: 24 @@ -2773,7 +2840,7 @@ networks: template: <<: (( &template )) bob: (( x " " y )) - + banda: bob: loves alice ``` @@ -2802,14 +2869,14 @@ networks: prd: 24 sum: 10 ``` - + - _Taking advantage of the *undefined* value_ At first glance it might look strange to introduce a value for *undefined*. But it can be really useful as will become apparent with the following examples. - - Whenever a stub syntactically defines a field it overwrites the default in the template during - merging. Therefore it would not be possible to define some expression for that field that eventually + - Whenever a stub syntactically defines a field it overwrites the default in the template during + merging. Therefore it would not be possible to define some expression for that field that eventually keeps the default value. Here the *undefined* value can help: e.g.: merging @@ -2839,7 +2906,7 @@ networks: * There is a problem accessing upstream values. This is only possible if the local stub contains the definition of the field to use. But then there will always be a value for this field, even if the upstream does not overwrite it. - + Here the *undefined* value can help by providing optional access to upstream values. Optional means, that the field is only defined, if there is an upstream value. Otherwise it is undefined for the expressions in the local stub and potential downstream templates. 
This is @@ -2862,13 +2929,13 @@ networks: config: alice: (( ~~ )) bob: (( ~~ )) - + alice: (( config.alice || ~~ )) bob: (( config.bob || ~~ )) peter: (( config.peter || ~~ )) ``` - and + and **config.yml** ```yaml @@ -2883,7 +2950,7 @@ networks: bob: 25 # kept default value, because not set in config.yml peter: 26 # kept, because mapping source not available in mapping.yml ``` - + This can be used to add an intermediate stub, that offers a dedicated configuration interface and contains logic to map this interface to a manifest structure already defining default values. @@ -2891,7 +2958,7 @@ networks: - _Templates versus map literals_ As described earlier templates can be used inside functions and mappings to - easily describe complex data structures based on expressions refering to + easily describe complex data structures based on expressions refering to parameters. Before the introduction of map literals this was the only way to achieve such behaviour. The advantage is the possibility to describe the complex structure as regular part of a yaml document, which allows using @@ -2953,7 +3020,7 @@ networks: ```yaml range: (( (|cidr,first,size|->(*templates.addr).range)("10.0.0.0/16",10,255) )) - + templates: addr: <<: (( &template )) @@ -2985,13 +3052,13 @@ If a dynaml expression cannot be resolved to a value, it is reported by the ``` (( )) in () ``` - + e.g.: -``` +``` (( min_ip("10") )) in source.yml node.a.[0] () *CIDR argument required ``` - + Cyclic dependencies are detected by iterative evaluation until the document is unchanged after a step. Nodes involved in a cycle are therefore typically reported just as unresolved node without a specific issue. @@ -3007,4 +3074,4 @@ tag. The following tags are used (in reporting order): Problems occuring during inline template processing are reported as nested problems. The classification is propagated to the outer node. 
- + diff --git a/flow/cascade.go b/flow/cascade.go index 12b2f03..99e0863 100644 --- a/flow/cascade.go +++ b/flow/cascade.go @@ -4,23 +4,35 @@ import ( "github.com/mandelsoft/spiff/yaml" ) -func Cascade(template yaml.Node, partial bool, templates ...yaml.Node) (yaml.Node, error) { - for i := len(templates) - 1; i >= 0; i-- { - flowed, err := Flow(templates[i], templates[i+1:]...) +func PrepareStubs(partial bool, stubs ...yaml.Node) ([]yaml.Node, error) { + for i := len(stubs) - 1; i >= 0; i-- { + flowed, err := Flow(stubs[i], stubs[i+1:]...) if !partial && err != nil { - return nil, err + return nil,err } - templates[i] = Cleanup(flowed, testLocal) + stubs[i] = Cleanup(flowed, testLocal) } + return stubs,nil +} - result, err := Flow(template, templates...) +func Apply(template yaml.Node, prepared []yaml.Node) (yaml.Node, error) { + result, err := Flow(template, prepared...) if err == nil { result = Cleanup(result, testTemporary) } return result, err } +func Cascade(template yaml.Node, partial bool, stubs ...yaml.Node) (yaml.Node, error) { + prepared,err:=PrepareStubs(partial, stubs...) 
+ if err!=nil { + return nil,err + } + + return Apply(template,prepared) +} + func testTemporary(node yaml.Node) bool { return node.Temporary() || node.Local() } diff --git a/spiff++.go b/spiff++.go index 89cb2ba..fdd59dd 100644 --- a/spiff++.go +++ b/spiff++.go @@ -89,13 +89,7 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { log.Fatalln(fmt.Sprintf("error reading template [%s]:", path.Clean(templateFilePath)), err) } - templateYAML, err := yaml.Parse(templateFilePath, templateFile) - if err != nil { - log.Fatalln(fmt.Sprintf("error parsing template [%s]:", path.Clean(templateFilePath)), err) - } - stubs := []yaml.Node{} - for _, stubFilePath := range stubFilePaths { var stubFile []byte var err error @@ -116,27 +110,61 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { if err != nil { log.Fatalln(fmt.Sprintf("error parsing stub [%s]:", path.Clean(stubFilePath)), err) } - stubs = append(stubs, stubYAML) } - flowed, err := flow.Cascade(templateYAML, partial, stubs...) + prepared, err := flow.PrepareStubs(partial, stubs...) 
if !partial && err != nil { - legend := "\nerror classification:\n" + - " *: error in local dynaml expression\n" + - " @: dependent of or involved in a cycle\n" + - " -: depending on a node with an error" - log.Fatalln("error generating manifest:", err, legend) - } - if err != nil { - flowed = dynaml.ResetUnresolvedNodes(flowed) - } - yaml, err := candiedyaml.Marshal(flowed) - if err != nil { - log.Fatalln("error marshalling manifest:", err) + legend := "\nerror classification:\n" + + " *: error in local dynaml expression\n" + + " @: dependent of or involved in a cycle\n" + + " -: depending on a node with an error" + log.Fatalln("error generating manifest:", err, legend) } - fmt.Println(string(yaml)) + docs := strings.Split(string(templateFile), "\n---\n") + out := "" + for i, doc := range docs { + if strings.Trim(doc," \n") == "" { + if i>0 && out!="" { + out += "---\n" + } + continue + } + suffix := "" + if len(docs) > 1 { + suffix = fmt.Sprintf(":%d", i+1) + } + templateYAML, err := yaml.Parse(templateFilePath+suffix, []byte(doc)) + if err != nil { + log.Fatalln(fmt.Sprintf("error parsing template [%s]:", path.Clean(templateFilePath)), err) + } + + flowed, err := flow.Apply(templateYAML, prepared) + if !partial && err != nil { + legend := "\nerror classification:\n" + + " *: error in local dynaml expression\n" + + " @: dependent of or involved in a cycle\n" + + " -: depending on a node with an error" + log.Fatalln("error generating manifest:", err, legend) + } + if err != nil { + flowed = dynaml.ResetUnresolvedNodes(flowed) + } + yaml, err := candiedyaml.Marshal(flowed) + if err != nil { + log.Fatalln("error marshalling manifest:", err) + } + s:=string(yaml) + if i > 0 && out!="" { + out += "---\n" + } + out += s + if !strings.HasSuffix(s,"\n") { + out+="\n" + } + } + fmt.Println(out) } func diff(aFilePath, bFilePath string, separator string) { diff --git a/spiff_test.go b/spiff_test.go index f245fdd..b744044 100644 --- a/spiff_test.go +++ b/spiff_test.go @@ 
-39,6 +39,8 @@ var _ = Describe("Running spiff", func() { basicTemplate.Write([]byte(` --- foo: bar +--- +alice: bob `)) merge, err = Start(exec.Command(spiff, "merge", basicTemplate.Name()), GinkgoWriter, GinkgoWriter) Expect(err).NotTo(HaveOccurred()) @@ -50,7 +52,10 @@ foo: bar It("resolves the template and prints it out", func() { Expect(merge.Wait()).To(Exit(0)) - Expect(merge.Out).To(Say(`foo: bar`)) + Expect(merge.Out).To(Say(`foo: bar +--- +alice: bob +`)) }) }) }) From a10899f8502d08515410c9fc649ad94c36854eb1 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sun, 30 Dec 2018 20:57:56 +0100 Subject: [PATCH 06/13] explicit group expression node --- dynaml/grouped.go | 17 ----------------- dynaml/parser.go | 3 +-- dynaml/parser_test.go | 6 +++--- 3 files changed, 4 insertions(+), 22 deletions(-) delete mode 100644 dynaml/grouped.go diff --git a/dynaml/grouped.go b/dynaml/grouped.go deleted file mode 100644 index 636967f..0000000 --- a/dynaml/grouped.go +++ /dev/null @@ -1,17 +0,0 @@ -package dynaml - -import ( - "fmt" -) - -type GroupedExpr struct { - Expr Expression -} - -func (e GroupedExpr) String() string { - return fmt.Sprintf("( %s )", e.Expr) -} - -func (e GroupedExpr) Evaluate(binding Binding, locally bool) (interface{}, EvaluationInfo, bool) { - return e.Expr.Evaluate(binding, locally) -} diff --git a/dynaml/parser.go b/dynaml/parser.go index 720c88e..b50f845 100644 --- a/dynaml/parser.go +++ b/dynaml/parser.go @@ -77,8 +77,6 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.Push(rhs) case rulePrefer: tokens.Push(PreferExpr{tokens.Pop()}) - case ruleGrouped: - tokens.Push(GroupedExpr{tokens.Pop()}) case ruleAuto: tokens.Push(AutoExpr{path}) case ruleMerge: @@ -290,6 +288,7 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.SetExpressionList(tokens.PopExpressionList()) case ruleKey, ruleIndex: + case ruleGrouped: case ruleLevel0, ruleLevel1, ruleLevel2, ruleLevel3, 
ruleLevel4, ruleLevel5, ruleLevel6, ruleLevel7: case ruleExpression: case ruleMap: diff --git a/dynaml/parser_test.go b/dynaml/parser_test.go index eb2f86d..676f062 100644 --- a/dynaml/parser_test.go +++ b/dynaml/parser_test.go @@ -293,7 +293,7 @@ var _ = Describe("parsing", func() { parsesAs( `(foo)(1)`, CallExpr{ - GroupedExpr{ReferenceExpr{[]string{"foo"}}}, + ReferenceExpr{[]string{"foo"}}, []Expression{ IntegerExpr{1}, }, @@ -358,10 +358,10 @@ var _ = Describe("parsing", func() { parsesAs( `("foo" - bar) - merge`, SubtractionExpr{ - GroupedExpr{SubtractionExpr{ + SubtractionExpr{ StringExpr{"foo"}, ReferenceExpr{[]string{"bar"}}, - }}, + }, MergeExpr{}, }, ) From 44ee19f3477f2a12237a9ab555123c3b2b7e5268 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sun, 30 Dec 2018 20:59:26 +0100 Subject: [PATCH 07/13] improve multi document streams + multi document diff --- .gitignore | 2 + flow/cascade.go | 12 ++--- spiff++.go | 124 +++++++++++++++++++++++--------------------- spiff_test.go | 7 +-- yaml/parser.go | 32 +++++++++--- yaml/parser_test.go | 15 ++++++ 6 files changed, 116 insertions(+), 76 deletions(-) diff --git a/.gitignore b/.gitignore index 93a5a93..4cb46da 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,6 @@ tags +.idea +/local /spiff /spiff++ *.coverprofile diff --git a/flow/cascade.go b/flow/cascade.go index 99e0863..c9372be 100644 --- a/flow/cascade.go +++ b/flow/cascade.go @@ -8,12 +8,12 @@ func PrepareStubs(partial bool, stubs ...yaml.Node) ([]yaml.Node, error) { for i := len(stubs) - 1; i >= 0; i-- { flowed, err := Flow(stubs[i], stubs[i+1:]...) 
if !partial && err != nil { - return nil,err + return nil, err } stubs[i] = Cleanup(flowed, testLocal) } - return stubs,nil + return stubs, nil } func Apply(template yaml.Node, prepared []yaml.Node) (yaml.Node, error) { @@ -25,12 +25,12 @@ func Apply(template yaml.Node, prepared []yaml.Node) (yaml.Node, error) { } func Cascade(template yaml.Node, partial bool, stubs ...yaml.Node) (yaml.Node, error) { - prepared,err:=PrepareStubs(partial, stubs...) - if err!=nil { - return nil,err + prepared, err := PrepareStubs(partial, stubs...) + if err != nil { + return nil, err } - return Apply(template,prepared) + return Apply(template, prepared) } func testTemporary(node yaml.Node) bool { diff --git a/spiff++.go b/spiff++.go index fdd59dd..44cd967 100644 --- a/spiff++.go +++ b/spiff++.go @@ -89,7 +89,13 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { log.Fatalln(fmt.Sprintf("error reading template [%s]:", path.Clean(templateFilePath)), err) } + templateYAMLs, err := yaml.ParseMulti(templateFilePath, templateFile) + if err != nil { + log.Fatalln(fmt.Sprintf("error parsing template [%s]:", path.Clean(templateFilePath)), err) + } + stubs := []yaml.Node{} + for _, stubFilePath := range stubFilePaths { var stubFile []byte var err error @@ -110,61 +116,39 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { if err != nil { log.Fatalln(fmt.Sprintf("error parsing stub [%s]:", path.Clean(stubFilePath)), err) } + stubs = append(stubs, stubYAML) } + legend := "\nerror classification:\n" + + " *: error in local dynaml expression\n" + + " @: dependent of or involved in a cycle\n" + + " -: depending on a node with an error" + prepared, err := flow.PrepareStubs(partial, stubs...) 
if !partial && err != nil { - legend := "\nerror classification:\n" + - " *: error in local dynaml expression\n" + - " @: dependent of or involved in a cycle\n" + - " -: depending on a node with an error" - log.Fatalln("error generating manifest:", err, legend) + log.Fatalln("error generating manifest:", err, legend) } - docs := strings.Split(string(templateFile), "\n---\n") - out := "" - for i, doc := range docs { - if strings.Trim(doc," \n") == "" { - if i>0 && out!="" { - out += "---\n" - } - continue + for no, templateYAML := range templateYAMLs { + doc := "" + if len(templateYAMLs) > 1 { + doc = fmt.Sprintf(" (document %d)", no+1) } - suffix := "" - if len(docs) > 1 { - suffix = fmt.Sprintf(":%d", i+1) - } - templateYAML, err := yaml.Parse(templateFilePath+suffix, []byte(doc)) - if err != nil { - log.Fatalln(fmt.Sprintf("error parsing template [%s]:", path.Clean(templateFilePath)), err) - } - flowed, err := flow.Apply(templateYAML, prepared) if !partial && err != nil { - legend := "\nerror classification:\n" + - " *: error in local dynaml expression\n" + - " @: dependent of or involved in a cycle\n" + - " -: depending on a node with an error" - log.Fatalln("error generating manifest:", err, legend) + log.Fatalln(fmt.Sprintf("error generating manifest%s:", doc), err, legend) } if err != nil { flowed = dynaml.ResetUnresolvedNodes(flowed) } yaml, err := candiedyaml.Marshal(flowed) if err != nil { - log.Fatalln("error marshalling manifest:", err) - } - s:=string(yaml) - if i > 0 && out!="" { - out += "---\n" - } - out += s - if !strings.HasSuffix(s,"\n") { - out+="\n" + log.Fatalln(fmt.Sprintf("error marshalling manifest%s:", doc), err) } + fmt.Println("---") + fmt.Println(string(yaml)) } - fmt.Println(out) } func diff(aFilePath, bFilePath string, separator string) { @@ -173,7 +157,7 @@ func diff(aFilePath, bFilePath string, separator string) { log.Fatalln(fmt.Sprintf("error reading a [%s]:", path.Clean(aFilePath)), err) } - aYAML, err := yaml.Parse(aFilePath, 
aFile) + aYAMLs, err := yaml.ParseMulti(aFilePath, aFile) if err != nil { log.Fatalln(fmt.Sprintf("error parsing a [%s]:", path.Clean(aFilePath)), err) } @@ -183,39 +167,63 @@ func diff(aFilePath, bFilePath string, separator string) { log.Fatalln(fmt.Sprintf("error reading b [%s]:", path.Clean(bFilePath)), err) } - bYAML, err := yaml.Parse(bFilePath, bFile) + bYAMLs, err := yaml.ParseMulti(bFilePath, bFile) if err != nil { log.Fatalln(fmt.Sprintf("error parsing b [%s]:", path.Clean(bFilePath)), err) } - diffs := compare.Compare(aYAML, bYAML) + if len(aYAMLs) != len(bYAMLs) { + fmt.Printf("Different number of documents (%d != %d)\n", len(aYAMLs), len(bYAMLs)) + return + } - if len(diffs) == 0 { + ddiffs := make([][]compare.Diff, len(aYAMLs)) + found := false + for no, aYAML := range aYAMLs { + bYAML := bYAMLs[no] + ddiffs[no] = compare.Compare(aYAML, bYAML) + if len(ddiffs[no]) != 0 { + found = true + } + } + if !found { fmt.Println("no differences!") return } + for no := range aYAMLs { + if len(ddiffs[no]) == 0 { + if len(aYAMLs) > 1 { + fmt.Println("No difference in document %d", no+1) + } + } else { + diffs := ddiffs[no] + doc := "" + if len(aYAMLs) > 1 { + doc = fmt.Sprintf("document %d", no+1) + } + for _, diff := range diffs { + fmt.Println("Difference in", doc, strings.Join(diff.Path, ".")) - for _, diff := range diffs { - fmt.Println("Difference in", strings.Join(diff.Path, ".")) + if diff.A != nil { + ayaml, err := candiedyaml.Marshal(diff.A) + if err != nil { + panic(err) + } - if diff.A != nil { - ayaml, err := candiedyaml.Marshal(diff.A) - if err != nil { - panic(err) - } + fmt.Printf(" %s has:\n \x1b[31m%s\x1b[0m\n", aFilePath, strings.Replace(string(ayaml), "\n", "\n ", -1)) + } - fmt.Printf(" %s has:\n \x1b[31m%s\x1b[0m\n", aFilePath, strings.Replace(string(ayaml), "\n", "\n ", -1)) - } + if diff.B != nil { + byaml, err := candiedyaml.Marshal(diff.B) + if err != nil { + panic(err) + } - if diff.B != nil { - byaml, err := candiedyaml.Marshal(diff.B) - 
if err != nil { - panic(err) - } + fmt.Printf(" %s has:\n \x1b[32m%s\x1b[0m\n", bFilePath, strings.Replace(string(byaml), "\n", "\n ", -1)) + } - fmt.Printf(" %s has:\n \x1b[32m%s\x1b[0m\n", bFilePath, strings.Replace(string(byaml), "\n", "\n ", -1)) + fmt.Printf(separator) + } } - - fmt.Printf(separator) } } diff --git a/spiff_test.go b/spiff_test.go index b744044..f245fdd 100644 --- a/spiff_test.go +++ b/spiff_test.go @@ -39,8 +39,6 @@ var _ = Describe("Running spiff", func() { basicTemplate.Write([]byte(` --- foo: bar ---- -alice: bob `)) merge, err = Start(exec.Command(spiff, "merge", basicTemplate.Name()), GinkgoWriter, GinkgoWriter) Expect(err).NotTo(HaveOccurred()) @@ -52,10 +50,7 @@ alice: bob It("resolves the template and prints it out", func() { Expect(merge.Wait()).To(Exit(0)) - Expect(merge.Out).To(Say(`foo: bar ---- -alice: bob -`)) + Expect(merge.Out).To(Say(`foo: bar`)) }) }) }) diff --git a/yaml/parser.go b/yaml/parser.go index 0b4096c..3939fbd 100644 --- a/yaml/parser.go +++ b/yaml/parser.go @@ -1,11 +1,11 @@ package yaml import ( + "bytes" "errors" "fmt" - "reflect" - "github.com/cloudfoundry-incubator/candiedyaml" + "reflect" ) type NonStringKeyError struct { @@ -17,14 +17,34 @@ func (e NonStringKeyError) Error() string { } func Parse(sourceName string, source []byte) (Node, error) { - var parsed interface{} - - err := candiedyaml.Unmarshal(source, &parsed) + docs, err := ParseMulti(sourceName, source) if err != nil { return nil, err } + if len(docs) > 1 { + return nil, fmt.Errorf("multi document not possible") + } + return docs[0], err +} + +func ParseMulti(sourceName string, source []byte) ([]Node, error) { + docs := []Node{} + r := bytes.NewBuffer(source) + d := candiedyaml.NewDecoder(r) - return sanitize(sourceName, parsed) + for d.HasNext() { + var parsed interface{} + err := d.Decode(&parsed) + if err != nil { + return nil, err + } + n, err := sanitize(sourceName, parsed) + if err != nil { + return nil, err + } + docs = append(docs, n) + } 
+ return docs, nil } func sanitize(sourceName string, root interface{}) (Node, error) { diff --git a/yaml/parser_test.go b/yaml/parser_test.go index 76297ed..06179c2 100644 --- a/yaml/parser_test.go +++ b/yaml/parser_test.go @@ -64,6 +64,21 @@ var _ = Describe("YAML Parser", func() { Expect(err.Error()).To(ContainSubstring("unknown type")) }) }) + + Context("parsing multi documents", func() { + It("returns all documents", func() { + sourceName := "test" + source := []byte(` +doc1: +--- +doc2: +`) + + docs, err := ParseMulti(sourceName, source) + Expect(err).NotTo(HaveOccurred()) + Expect(len(docs)).To(Equal(2)) + }) + }) }) func parsesAs(source string, expr interface{}) { From a2d3561a48f9e77f3c9ab0ab10536864afa66fea Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sun, 30 Dec 2018 23:24:55 +0100 Subject: [PATCH 08/13] include discontinued candiedyaml as vendor --- Godeps/Godeps.json | 4 --- README.md | 27 ++++++++++++++----- .../cloudfoundry-incubator/candiedyaml | 1 + 3 files changed, 21 insertions(+), 11 deletions(-) create mode 160000 vendor/github.com/cloudfoundry-incubator/candiedyaml diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json index 5dd4bb5..833a11f 100644 --- a/Godeps/Godeps.json +++ b/Godeps/Godeps.json @@ -2,10 +2,6 @@ "ImportPath": "github.com/cloudfoundry-incubator/spiff", "GoVersion": "go1.4.1", "Deps": [ - { - "ImportPath": "github.com/cloudfoundry-incubator/candiedyaml", - "Rev": "4e924c79e32959414e8c7aaed6607176d8ee79c3" - }, { "ImportPath": "github.com/codegangsta/cli", "Comment": "1.2.0-95-g9b2bd2b", diff --git a/README.md b/README.md index 0138fd9..3876c37 100644 --- a/README.md +++ b/README.md @@ -140,17 +140,30 @@ Example: spiff merge cf-release/templates/cf-deployment.yml my-cloud-stub.yml ``` -The ` merge` command offers the option `--partial`. If this option is given spiff handles incomplete expression evaluation. All errors are ignored and the unresolvable parts of the yaml document are returned as strings. 
- -It is possible to read one file from standard input by using the file name `-`. It may be used only once. This allows using spiff as part of a pipeline to just process a single stream or to process a stream based on several templates/stubs. - -The template file (first argument) may be a multi document file containing multiple YAML documents -separated by a line containing only `---`. Each YAML document will be -processed independently with the given stub files. +The ` merge` command offers the option `--partial`. If this option is +given spiff handles incomplete expression evaluation. All errors are ignored +and the unresolvable parts of the yaml document are returned as strings. + +It is possible to read one file from standard input by using the file +name `-`. It may be used only once. This allows using spiff as part of a +pipeline to just process a single stream or to process a stream based on +several templates/stubs. + +The template file (first argument) may be a multiple document stream +containing multiple YAML documents separated by a line containing only `---`. +Each YAML document will be processed independently with the given stub files. +The result is the stream of processed documents in the same order. +For example, this can be used to generate *kubernetes* manifests to be used +by `kubectl`. ### `spiff diff manifest.yml other-manifest.yml` Show structural differences between two deployment manifests. +Here streams with multiple documents are supported, also. +To indicate no difference the number of documents in both streams must be +identical and each document in the first stream must have no difference +compared to the document with the same index in the second stream. +Found differences are shown for each document separately. Unlike basic diffing tools and even `bosh diff`, this command has semantic knowledge of a deployment manifest, and is not just text-based. 
For example, diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml new file mode 160000 index 0000000..4e924c7 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml @@ -0,0 +1 @@ +Subproject commit 4e924c79e32959414e8c7aaed6607176d8ee79c3 From 1ca69790724147d4e6cc2005f28566a913593d0e Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Sun, 30 Dec 2018 23:35:57 +0100 Subject: [PATCH 09/13] go1.10.4 --- .travis.yml | 2 +- .../cloudfoundry-incubator/candiedyaml | 1 - .../candiedyaml/.gitignore | 1 + .../candiedyaml/.travis.yml | 12 + .../candiedyaml/LICENSE | 203 + .../candiedyaml/README.md | 57 + .../cloudfoundry-incubator/candiedyaml/api.go | 834 +++++ .../candiedyaml/candiedyaml_suite_test.go | 27 + .../candiedyaml/decode.go | 626 ++++ .../candiedyaml/decode_test.go | 906 +++++ .../candiedyaml/emitter.go | 2072 ++++++++++ .../candiedyaml/encode.go | 395 ++ .../candiedyaml/encode_test.go | 634 ++++ .../fixtures/specification/example2_1.yaml | 3 + .../fixtures/specification/example2_10.yaml | 8 + .../fixtures/specification/example2_11.yaml | 9 + .../fixtures/specification/example2_12.yaml | 8 + .../fixtures/specification/example2_13.yaml | 4 + .../fixtures/specification/example2_14.yaml | 4 + .../fixtures/specification/example2_15.yaml | 8 + .../specification/example2_15_dumped.yaml | 7 + .../fixtures/specification/example2_16.yaml | 7 + .../fixtures/specification/example2_17.yaml | 7 + .../specification/example2_17_control.yaml | 2 + .../specification/example2_17_hexesc.yaml | 2 + .../specification/example2_17_quoted.yaml | 2 + .../specification/example2_17_single.yaml | 1 + .../example2_17_tie_fighter.yaml | 1 + .../specification/example2_17_unicode.yaml | 2 + .../fixtures/specification/example2_18.yaml | 6 + .../fixtures/specification/example2_19.yaml | 4 + .../fixtures/specification/example2_2.yaml | 3 + .../fixtures/specification/example2_20.yaml | 5 + 
.../fixtures/specification/example2_21.yaml | 4 + .../fixtures/specification/example2_22.yaml | 4 + .../fixtures/specification/example2_23.yaml | 14 + .../example2_23_application.yaml | 5 + .../specification/example2_23_non_date.yaml | 3 + .../specification/example2_23_picture.yaml | 9 + .../fixtures/specification/example2_24.yaml | 14 + .../specification/example2_24_dumped.yaml | 11 + .../fixtures/specification/example2_25.yaml | 7 + .../fixtures/specification/example2_26.yaml | 7 + .../fixtures/specification/example2_27.yaml | 29 + .../specification/example2_27_dumped.yaml | 20 + .../fixtures/specification/example2_28.yaml | 29 + .../fixtures/specification/example2_3.yaml | 8 + .../fixtures/specification/example2_4.yaml | 8 + .../fixtures/specification/example2_5.yaml | 3 + .../fixtures/specification/example2_6.yaml | 5 + .../fixtures/specification/example2_7.yaml | 10 + .../fixtures/specification/example2_8.yaml | 10 + .../fixtures/specification/example2_9.yaml | 8 + .../fixtures/specification/example_empty.yaml | 0 .../fixtures/specification/types/map.yaml | 6 + .../specification/types/map_mixed_tags.yaml | 6 + .../fixtures/specification/types/merge.yaml | 27 + .../fixtures/specification/types/omap.yaml | 8 + .../fixtures/specification/types/pairs.yaml | 7 + .../fixtures/specification/types/seq.yaml | 14 + .../fixtures/specification/types/set.yaml | 7 + .../fixtures/specification/types/v.yaml | 4 + .../fixtures/specification/types/value.yaml | 10 + .../candiedyaml/libyaml-LICENSE | 19 + .../candiedyaml/parser.go | 1230 ++++++ .../candiedyaml/parser_test.go | 81 + .../candiedyaml/reader.go | 465 +++ .../candiedyaml/reader_test.go | 291 ++ .../candiedyaml/resolver.go | 449 +++ .../candiedyaml/resolver_test.go | 665 ++++ .../candiedyaml/run_parser.go | 62 + .../candiedyaml/scanner.go | 3318 +++++++++++++++++ .../candiedyaml/scanner_test.go | 80 + .../candiedyaml/tags.go | 360 ++ .../candiedyaml/writer.go | 128 + .../candiedyaml/yaml_definesh.go | 22 + 
.../candiedyaml/yaml_privateh.go | 891 +++++ .../candiedyaml/yamlh.go | 953 +++++ 78 files changed, 15172 insertions(+), 2 deletions(-) delete mode 160000 vendor/github.com/cloudfoundry-incubator/candiedyaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/.gitignore create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/.travis.yml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/LICENSE create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/README.md create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/api.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/candiedyaml_suite_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/decode.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/decode_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/emitter.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/encode.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/encode_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_1.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_10.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_11.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_12.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_13.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_14.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15.yaml create mode 100644 
vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15_dumped.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_16.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_control.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_hexesc.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_quoted.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_single.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_tie_fighter.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_unicode.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_18.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_19.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_2.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_20.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_21.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_22.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_application.yaml create mode 100644 
vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_non_date.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_picture.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24_dumped.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_25.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_26.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27_dumped.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_28.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_3.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_4.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_5.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_6.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_7.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_8.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_9.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example_empty.yaml create mode 100644 
vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map_mixed_tags.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/merge.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/omap.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/pairs.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/seq.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/set.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/v.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/value.yaml create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/libyaml-LICENSE create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/parser.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/parser_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/reader.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/reader_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/run_parser.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner_test.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/tags.go create mode 100644 
vendor/github.com/cloudfoundry-incubator/candiedyaml/writer.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_definesh.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_privateh.go create mode 100644 vendor/github.com/cloudfoundry-incubator/candiedyaml/yamlh.go diff --git a/.travis.yml b/.travis.yml index 2f53c14..c523b92 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ language: go go: - - 1.7 + - 1.10.4 install: - go get -v github.com/kr/godep diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml deleted file mode 160000 index 4e924c7..0000000 --- a/vendor/github.com/cloudfoundry-incubator/candiedyaml +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 4e924c79e32959414e8c7aaed6607176d8ee79c3 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/.gitignore b/vendor/github.com/cloudfoundry-incubator/candiedyaml/.gitignore new file mode 100644 index 0000000..7823778 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/.gitignore @@ -0,0 +1 @@ +*.coverprofile diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/.travis.yml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/.travis.yml new file mode 100644 index 0000000..b39955e --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/.travis.yml @@ -0,0 +1,12 @@ +language: go + +go: + - 1.4.1 + +install: + - go get -t -v ./... 
+ - go install github.com/onsi/ginkgo/ginkgo + +script: + - export PATH=$HOME/gopath/bin:$PATH + - ginkgo -r -failOnPending -randomizeAllSpecs -race diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/LICENSE b/vendor/github.com/cloudfoundry-incubator/candiedyaml/LICENSE new file mode 100644 index 0000000..f4f87bd --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/LICENSE @@ -0,0 +1,203 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/README.md b/vendor/github.com/cloudfoundry-incubator/candiedyaml/README.md new file mode 100644 index 0000000..266c28c --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/README.md @@ -0,0 +1,57 @@ +[![Build Status](https://travis-ci.org/cloudfoundry-incubator/candiedyaml.svg)](https://travis-ci.org/cloudfoundry-incubator/candiedyaml) + +candiedyaml +=========== + +YAML for Go + +A YAML 1.1 parser with support for YAML 1.2 features + +Usage +----- + +```go +package myApp + +import ( + "github.com/cloudfoundry-incubator/candiedyaml" + "fmt" + "os" +) + +func main() { + file, err := os.Open("path/to/some/file.yml") + if err != nil { + println("File does not exist:", err.Error()) + os.Exit(1) + } + defer file.Close() + + document := new(interface{}) + decoder := candiedyaml.NewDecoder(file) + err = decoder.Decode(document) + + if err != nil { + println("Failed to decode document:", err.Error()) + } + + println("parsed yml into interface:", fmt.Sprintf("%#v", document)) + + fileToWrite, err := os.Create("path/to/some/new/file.yml") + if err != nil { + println("Failed to open file for writing:", err.Error()) + os.Exit(1) + } + defer fileToWrite.Close() + + encoder := candiedyaml.NewEncoder(fileToWrite) + err = encoder.Encode(document) + + if err != nil { + println("Failed to 
encode document:", err.Error()) + os.Exit(1) + } + + return +} +``` diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/api.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/api.go new file mode 100644 index 0000000..87c1043 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/api.go @@ -0,0 +1,834 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "io" +) + +/* + * Create a new parser object. + */ + +func yaml_parser_initialize(parser *yaml_parser_t) bool { + *parser = yaml_parser_t{ + raw_buffer: make([]byte, 0, INPUT_RAW_BUFFER_SIZE), + buffer: make([]byte, 0, INPUT_BUFFER_SIZE), + } + + return true +} + +/* + * Destroy a parser object. + */ +func yaml_parser_delete(parser *yaml_parser_t) { + *parser = yaml_parser_t{} +} + +/* + * String read handler. + */ + +func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (int, error) { + if parser.input_pos == len(parser.input) { + return 0, io.EOF + } + + n := copy(buffer, parser.input[parser.input_pos:]) + parser.input_pos += n + return n, nil +} + +/* + * File read handler. + */ + +func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (int, error) { + return parser.input_reader.Read(buffer) +} + +/* + * Set a string input. 
+ */ + +func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { + if parser.read_handler != nil { + panic("input already set") + } + + parser.read_handler = yaml_string_read_handler + + parser.input = input + parser.input_pos = 0 +} + +/* + * Set a reader input + */ +func yaml_parser_set_input_reader(parser *yaml_parser_t, reader io.Reader) { + if parser.read_handler != nil { + panic("input already set") + } + + parser.read_handler = yaml_file_read_handler + parser.input_reader = reader +} + +/* + * Set a generic input. + */ + +func yaml_parser_set_input(parser *yaml_parser_t, handler yaml_read_handler_t) { + if parser.read_handler != nil { + panic("input already set") + } + + parser.read_handler = handler +} + +/* + * Set the source encoding. + */ + +func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { + if parser.encoding != yaml_ANY_ENCODING { + panic("encoding already set") + } + + parser.encoding = encoding +} + +/* + * Create a new emitter object. + */ + +func yaml_emitter_initialize(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{ + buffer: make([]byte, OUTPUT_BUFFER_SIZE), + raw_buffer: make([]byte, 0, OUTPUT_RAW_BUFFER_SIZE), + states: make([]yaml_emitter_state_t, 0, INITIAL_STACK_SIZE), + events: make([]yaml_event_t, 0, INITIAL_QUEUE_SIZE), + } +} + +func yaml_emitter_delete(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{} +} + +/* + * String write handler. + */ + +func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + *emitter.output_buffer = append(*emitter.output_buffer, buffer...) + return nil +} + +/* + * File write handler. + */ + +func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + _, err := emitter.output_writer.Write(buffer) + return err +} + +/* + * Set a string output. 
+ */ + +func yaml_emitter_set_output_string(emitter *yaml_emitter_t, buffer *[]byte) { + if emitter.write_handler != nil { + panic("output already set") + } + + emitter.write_handler = yaml_string_write_handler + emitter.output_buffer = buffer +} + +/* + * Set a file output. + */ + +func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { + if emitter.write_handler != nil { + panic("output already set") + } + + emitter.write_handler = yaml_writer_write_handler + emitter.output_writer = w +} + +/* + * Set a generic output handler. + */ + +func yaml_emitter_set_output(emitter *yaml_emitter_t, handler yaml_write_handler_t) { + if emitter.write_handler != nil { + panic("output already set") + } + + emitter.write_handler = handler +} + +/* + * Set the output encoding. + */ + +func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { + if emitter.encoding != yaml_ANY_ENCODING { + panic("encoding already set") + } + + emitter.encoding = encoding +} + +/* + * Set the canonical output style. + */ + +func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { + emitter.canonical = canonical +} + +/* + * Set the indentation increment. + */ + +func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { + if indent < 2 || indent > 9 { + indent = 2 + } + emitter.best_indent = indent +} + +/* + * Set the preferred line width. + */ + +func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { + if width < 0 { + width = -1 + } + emitter.best_width = width +} + +/* + * Set if unescaped non-ASCII characters are allowed. + */ + +func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { + emitter.unicode = unicode +} + +/* + * Set the preferred line break character. + */ + +func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { + emitter.line_break = line_break +} + +/* + * Destroy a token object. 
+ */ + +// yaml_DECLARE(void) +// yaml_token_delete(yaml_token_t *token) +// { +// assert(token); /* Non-NULL token object expected. */ +// +// switch (token.type) +// { +// case yaml_TAG_DIRECTIVE_TOKEN: +// yaml_free(token.data.tag_directive.handle); +// yaml_free(token.data.tag_directive.prefix); +// break; +// +// case yaml_ALIAS_TOKEN: +// yaml_free(token.data.alias.value); +// break; +// +// case yaml_ANCHOR_TOKEN: +// yaml_free(token.data.anchor.value); +// break; +// +// case yaml_TAG_TOKEN: +// yaml_free(token.data.tag.handle); +// yaml_free(token.data.tag.suffix); +// break; +// +// case yaml_SCALAR_TOKEN: +// yaml_free(token.data.scalar.value); +// break; +// +// default: +// break; +// } +// +// memset(token, 0, sizeof(yaml_token_t)); +// } + +/* + * Check if a string is a valid UTF-8 sequence. + * + * Check 'reader.c' for more details on UTF-8 encoding. + */ + +// static int +// yaml_check_utf8(yaml_char_t *start, size_t length) +// { +// yaml_char_t *end = start+length; +// yaml_char_t *pointer = start; +// +// while (pointer < end) { +// unsigned char octet; +// unsigned int width; +// unsigned int value; +// size_t k; +// +// octet = pointer[0]; +// width = (octet & 0x80) == 0x00 ? 1 : +// (octet & 0xE0) == 0xC0 ? 2 : +// (octet & 0xF0) == 0xE0 ? 3 : +// (octet & 0xF8) == 0xF0 ? 4 : 0; +// value = (octet & 0x80) == 0x00 ? octet & 0x7F : +// (octet & 0xE0) == 0xC0 ? octet & 0x1F : +// (octet & 0xF0) == 0xE0 ? octet & 0x0F : +// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; +// if (!width) return 0; +// if (pointer+width > end) return 0; +// for (k = 1; k < width; k ++) { +// octet = pointer[k]; +// if ((octet & 0xC0) != 0x80) return 0; +// value = (value << 6) + (octet & 0x3F); +// } +// if (!((width == 1) || +// (width == 2 && value >= 0x80) || +// (width == 3 && value >= 0x800) || +// (width == 4 && value >= 0x10000))) return 0; +// +// pointer += width; +// } +// +// return 1; +// } + +/* + * Create STREAM-START. 
+ */ + +func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { + *event = yaml_event_t{ + event_type: yaml_STREAM_START_EVENT, + encoding: encoding, + } +} + +/* + * Create STREAM-END. + */ + +func yaml_stream_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + event_type: yaml_STREAM_END_EVENT, + } +} + +/* + * Create DOCUMENT-START. + */ + +func yaml_document_start_event_initialize(event *yaml_event_t, + version_directive *yaml_version_directive_t, + tag_directives []yaml_tag_directive_t, + implicit bool) { + *event = yaml_event_t{ + event_type: yaml_DOCUMENT_START_EVENT, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: implicit, + } +} + +/* + * Create DOCUMENT-END. + */ + +func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { + *event = yaml_event_t{ + event_type: yaml_DOCUMENT_END_EVENT, + implicit: implicit, + } +} + +/* + * Create ALIAS. + */ + +func yaml_alias_event_initialize(event *yaml_event_t, anchor []byte) { + *event = yaml_event_t{ + event_type: yaml_ALIAS_EVENT, + anchor: anchor, + } +} + +/* + * Create SCALAR. + */ + +func yaml_scalar_event_initialize(event *yaml_event_t, + anchor []byte, tag []byte, + value []byte, + plain_implicit bool, quoted_implicit bool, + style yaml_scalar_style_t) { + + *event = yaml_event_t{ + event_type: yaml_SCALAR_EVENT, + anchor: anchor, + tag: tag, + value: value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(style), + } +} + +/* + * Create SEQUENCE-START. + */ + +func yaml_sequence_start_event_initialize(event *yaml_event_t, + anchor []byte, tag []byte, implicit bool, style yaml_sequence_style_t) { + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } +} + +/* + * Create SEQUENCE-END. 
+ */ + +func yaml_sequence_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_END_EVENT, + } +} + +/* + * Create MAPPING-START. + */ + +func yaml_mapping_start_event_initialize(event *yaml_event_t, + anchor []byte, tag []byte, implicit bool, style yaml_mapping_style_t) { + *event = yaml_event_t{ + event_type: yaml_MAPPING_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } +} + +/* + * Create MAPPING-END. + */ + +func yaml_mapping_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + event_type: yaml_MAPPING_END_EVENT, + } +} + +/* + * Destroy an event object. + */ + +func yaml_event_delete(event *yaml_event_t) { + *event = yaml_event_t{} +} + +// /* +// * Create a document object. +// */ +// +// func yaml_document_initialize(document *yaml_document_t, +// version_directive *yaml_version_directive_t, +// tag_directives []yaml_tag_directive_t, +// start_implicit, end_implicit bool) bool { +// +// +// { +// struct { +// YAML_error_type_t error; +// } context; +// struct { +// yaml_node_t *start; +// yaml_node_t *end; +// yaml_node_t *top; +// } nodes = { NULL, NULL, NULL }; +// yaml_version_directive_t *version_directive_copy = NULL; +// struct { +// yaml_tag_directive_t *start; +// yaml_tag_directive_t *end; +// yaml_tag_directive_t *top; +// } tag_directives_copy = { NULL, NULL, NULL }; +// yaml_tag_directive_t value = { NULL, NULL }; +// YAML_mark_t mark = { 0, 0, 0 }; +// +// assert(document); /* Non-NULL document object is expected. */ +// assert((tag_directives_start && tag_directives_end) || +// (tag_directives_start == tag_directives_end)); +// /* Valid tag directives are expected. 
*/ +// +// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error; +// +// if (version_directive) { +// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)); +// if (!version_directive_copy) goto error; +// version_directive_copy.major = version_directive.major; +// version_directive_copy.minor = version_directive.minor; +// } +// +// if (tag_directives_start != tag_directives_end) { +// yaml_tag_directive_t *tag_directive; +// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) +// goto error; +// for (tag_directive = tag_directives_start; +// tag_directive != tag_directives_end; tag_directive ++) { +// assert(tag_directive.handle); +// assert(tag_directive.prefix); +// if (!yaml_check_utf8(tag_directive.handle, +// strlen((char *)tag_directive.handle))) +// goto error; +// if (!yaml_check_utf8(tag_directive.prefix, +// strlen((char *)tag_directive.prefix))) +// goto error; +// value.handle = yaml_strdup(tag_directive.handle); +// value.prefix = yaml_strdup(tag_directive.prefix); +// if (!value.handle || !value.prefix) goto error; +// if (!PUSH(&context, tag_directives_copy, value)) +// goto error; +// value.handle = NULL; +// value.prefix = NULL; +// } +// } +// +// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, +// tag_directives_copy.start, tag_directives_copy.top, +// start_implicit, end_implicit, mark, mark); +// +// return 1; +// +// error: +// STACK_DEL(&context, nodes); +// yaml_free(version_directive_copy); +// while (!STACK_EMPTY(&context, tag_directives_copy)) { +// yaml_tag_directive_t value = POP(&context, tag_directives_copy); +// yaml_free(value.handle); +// yaml_free(value.prefix); +// } +// STACK_DEL(&context, tag_directives_copy); +// yaml_free(value.handle); +// yaml_free(value.prefix); +// +// return 0; +// } +// +// /* +// * Destroy a document object. 
+// */ +// +// yaml_DECLARE(void) +// yaml_document_delete(document *yaml_document_t) +// { +// struct { +// YAML_error_type_t error; +// } context; +// yaml_tag_directive_t *tag_directive; +// +// context.error = yaml_NO_ERROR; /* Eliminate a compliler warning. */ +// +// assert(document); /* Non-NULL document object is expected. */ +// +// while (!STACK_EMPTY(&context, document.nodes)) { +// yaml_node_t node = POP(&context, document.nodes); +// yaml_free(node.tag); +// switch (node.type) { +// case yaml_SCALAR_NODE: +// yaml_free(node.data.scalar.value); +// break; +// case yaml_SEQUENCE_NODE: +// STACK_DEL(&context, node.data.sequence.items); +// break; +// case yaml_MAPPING_NODE: +// STACK_DEL(&context, node.data.mapping.pairs); +// break; +// default: +// assert(0); /* Should not happen. */ +// } +// } +// STACK_DEL(&context, document.nodes); +// +// yaml_free(document.version_directive); +// for (tag_directive = document.tag_directives.start; +// tag_directive != document.tag_directives.end; +// tag_directive++) { +// yaml_free(tag_directive.handle); +// yaml_free(tag_directive.prefix); +// } +// yaml_free(document.tag_directives.start); +// +// memset(document, 0, sizeof(yaml_document_t)); +// } +// +// /** +// * Get a document node. +// */ +// +// yaml_DECLARE(yaml_node_t *) +// yaml_document_get_node(document *yaml_document_t, int index) +// { +// assert(document); /* Non-NULL document object is expected. */ +// +// if (index > 0 && document.nodes.start + index <= document.nodes.top) { +// return document.nodes.start + index - 1; +// } +// return NULL; +// } +// +// /** +// * Get the root object. +// */ +// +// yaml_DECLARE(yaml_node_t *) +// yaml_document_get_root_node(document *yaml_document_t) +// { +// assert(document); /* Non-NULL document object is expected. */ +// +// if (document.nodes.top != document.nodes.start) { +// return document.nodes.start; +// } +// return NULL; +// } +// +// /* +// * Add a scalar node to a document. 
+// */ +// +// yaml_DECLARE(int) +// yaml_document_add_scalar(document *yaml_document_t, +// yaml_char_t *tag, yaml_char_t *value, int length, +// yaml_scalar_style_t style) +// { +// struct { +// YAML_error_type_t error; +// } context; +// YAML_mark_t mark = { 0, 0, 0 }; +// yaml_char_t *tag_copy = NULL; +// yaml_char_t *value_copy = NULL; +// yaml_node_t node; +// +// assert(document); /* Non-NULL document object is expected. */ +// assert(value); /* Non-NULL value is expected. */ +// +// if (!tag) { +// tag = (yaml_char_t *)yaml_DEFAULT_SCALAR_TAG; +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; +// tag_copy = yaml_strdup(tag); +// if (!tag_copy) goto error; +// +// if (length < 0) { +// length = strlen((char *)value); +// } +// +// if (!yaml_check_utf8(value, length)) goto error; +// value_copy = yaml_malloc(length+1); +// if (!value_copy) goto error; +// memcpy(value_copy, value, length); +// value_copy[length] = '\0'; +// +// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark); +// if (!PUSH(&context, document.nodes, node)) goto error; +// +// return document.nodes.top - document.nodes.start; +// +// error: +// yaml_free(tag_copy); +// yaml_free(value_copy); +// +// return 0; +// } +// +// /* +// * Add a sequence node to a document. +// */ +// +// yaml_DECLARE(int) +// yaml_document_add_sequence(document *yaml_document_t, +// yaml_char_t *tag, yaml_sequence_style_t style) +// { +// struct { +// YAML_error_type_t error; +// } context; +// YAML_mark_t mark = { 0, 0, 0 }; +// yaml_char_t *tag_copy = NULL; +// struct { +// yaml_node_item_t *start; +// yaml_node_item_t *end; +// yaml_node_item_t *top; +// } items = { NULL, NULL, NULL }; +// yaml_node_t node; +// +// assert(document); /* Non-NULL document object is expected. 
*/ +// +// if (!tag) { +// tag = (yaml_char_t *)yaml_DEFAULT_SEQUENCE_TAG; +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; +// tag_copy = yaml_strdup(tag); +// if (!tag_copy) goto error; +// +// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error; +// +// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, +// style, mark, mark); +// if (!PUSH(&context, document.nodes, node)) goto error; +// +// return document.nodes.top - document.nodes.start; +// +// error: +// STACK_DEL(&context, items); +// yaml_free(tag_copy); +// +// return 0; +// } +// +// /* +// * Add a mapping node to a document. +// */ +// +// yaml_DECLARE(int) +// yaml_document_add_mapping(document *yaml_document_t, +// yaml_char_t *tag, yaml_mapping_style_t style) +// { +// struct { +// YAML_error_type_t error; +// } context; +// YAML_mark_t mark = { 0, 0, 0 }; +// yaml_char_t *tag_copy = NULL; +// struct { +// yaml_node_pair_t *start; +// yaml_node_pair_t *end; +// yaml_node_pair_t *top; +// } pairs = { NULL, NULL, NULL }; +// yaml_node_t node; +// +// assert(document); /* Non-NULL document object is expected. */ +// +// if (!tag) { +// tag = (yaml_char_t *)yaml_DEFAULT_MAPPING_TAG; +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; +// tag_copy = yaml_strdup(tag); +// if (!tag_copy) goto error; +// +// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error; +// +// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, +// style, mark, mark); +// if (!PUSH(&context, document.nodes, node)) goto error; +// +// return document.nodes.top - document.nodes.start; +// +// error: +// STACK_DEL(&context, pairs); +// yaml_free(tag_copy); +// +// return 0; +// } +// +// /* +// * Append an item to a sequence node. 
+// */ +// +// yaml_DECLARE(int) +// yaml_document_append_sequence_item(document *yaml_document_t, +// int sequence, int item) +// { +// struct { +// YAML_error_type_t error; +// } context; +// +// assert(document); /* Non-NULL document is required. */ +// assert(sequence > 0 +// && document.nodes.start + sequence <= document.nodes.top); +// /* Valid sequence id is required. */ +// assert(document.nodes.start[sequence-1].type == yaml_SEQUENCE_NODE); +// /* A sequence node is required. */ +// assert(item > 0 && document.nodes.start + item <= document.nodes.top); +// /* Valid item id is required. */ +// +// if (!PUSH(&context, +// document.nodes.start[sequence-1].data.sequence.items, item)) +// return 0; +// +// return 1; +// } +// +// /* +// * Append a pair of a key and a value to a mapping node. +// */ +// +// yaml_DECLARE(int) +// yaml_document_append_mapping_pair(document *yaml_document_t, +// int mapping, int key, int value) +// { +// struct { +// YAML_error_type_t error; +// } context; +// +// yaml_node_pair_t pair; +// +// assert(document); /* Non-NULL document is required. */ +// assert(mapping > 0 +// && document.nodes.start + mapping <= document.nodes.top); +// /* Valid mapping id is required. */ +// assert(document.nodes.start[mapping-1].type == yaml_MAPPING_NODE); +// /* A mapping node is required. */ +// assert(key > 0 && document.nodes.start + key <= document.nodes.top); +// /* Valid key id is required. */ +// assert(value > 0 && document.nodes.start + value <= document.nodes.top); +// /* Valid value id is required. 
*/ +// +// pair.key = key; +// pair.value = value; +// +// if (!PUSH(&context, +// document.nodes.start[mapping-1].data.mapping.pairs, pair)) +// return 0; +// +// return 1; +// } +// diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/candiedyaml_suite_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/candiedyaml_suite_test.go new file mode 100644 index 0000000..0b97fe9 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/candiedyaml_suite_test.go @@ -0,0 +1,27 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestCandiedyaml(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Candiedyaml Suite") +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode.go new file mode 100644 index 0000000..c041bbe --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode.go @@ -0,0 +1,626 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "bytes" + "errors" + "fmt" + "io" + "reflect" + "runtime" + "strconv" + "strings" +) + +type Unmarshaler interface { + UnmarshalYAML(tag string, value interface{}) error +} + +// A Number represents a JSON number literal. +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +type Decoder struct { + parser yaml_parser_t + event yaml_event_t + replay_events []yaml_event_t + useNumber bool + + anchors map[string][]yaml_event_t + tracking_anchors [][]yaml_event_t +} + +type ParserError struct { + ErrorType YAML_error_type_t + Context string + ContextMark YAML_mark_t + Problem string + ProblemMark YAML_mark_t +} + +func (e *ParserError) Error() string { + return fmt.Sprintf("yaml: [%s] %s at line %d, column %d", e.Context, e.Problem, e.ProblemMark.line+1, e.ProblemMark.column+1) +} + +type UnexpectedEventError struct { + Value string + EventType yaml_event_type_t + At YAML_mark_t +} + +func (e *UnexpectedEventError) Error() string { + return fmt.Sprintf("yaml: Unexpect event [%d]: '%s' at line %d, column %d", e.EventType, e.Value, e.At.line+1, e.At.column+1) +} + +func recovery(err *error) { + if r := recover(); r != nil { + if _, ok := r.(runtime.Error); ok { + panic(r) + } + + var tmpError error + switch r := r.(type) { + case error: + tmpError = r + case string: + tmpError = errors.New(r) + default: + tmpError = errors.New("Unknown panic: " + reflect.ValueOf(r).String()) + } + + *err = tmpError + } +} + +func Unmarshal(data []byte, v interface{}) error { + d := 
NewDecoder(bytes.NewBuffer(data)) + return d.Decode(v) +} + +func NewDecoder(r io.Reader) *Decoder { + d := &Decoder{ + anchors: make(map[string][]yaml_event_t), + tracking_anchors: make([][]yaml_event_t, 1), + } + yaml_parser_initialize(&d.parser) + yaml_parser_set_input_reader(&d.parser, r) + return d +} + +func (d *Decoder) Decode(v interface{}) (err error) { + defer recovery(&err) + + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr || rv.IsNil() { + return fmt.Errorf("Expected a pointer or nil but was a %s at %s", rv.String(), d.event.start_mark) + } + + if d.event.event_type == yaml_NO_EVENT { + d.nextEvent() + + if d.event.event_type != yaml_STREAM_START_EVENT { + return errors.New("Invalid stream") + } + + d.nextEvent() + } + + d.document(rv) + return nil +} + +func (d *Decoder) UseNumber() { d.useNumber = true } + +func (d *Decoder) error(err error) { + panic(err) +} + +func (d *Decoder) nextEvent() { + if d.event.event_type == yaml_STREAM_END_EVENT { + d.error(errors.New("The stream is closed")) + } + + if d.replay_events != nil { + d.event = d.replay_events[0] + if len(d.replay_events) == 1 { + d.replay_events = nil + } else { + d.replay_events = d.replay_events[1:] + } + } else { + if !yaml_parser_parse(&d.parser, &d.event) { + yaml_event_delete(&d.event) + + d.error(&ParserError{ + ErrorType: d.parser.error, + Context: d.parser.context, + ContextMark: d.parser.context_mark, + Problem: d.parser.problem, + ProblemMark: d.parser.problem_mark, + }) + } + } + + last := len(d.tracking_anchors) + // skip aliases when tracking an anchor + if last > 0 && d.event.event_type != yaml_ALIAS_EVENT { + d.tracking_anchors[last-1] = append(d.tracking_anchors[last-1], d.event) + } +} + +func (d *Decoder) HasNext() bool { + return d.event.event_type != yaml_STREAM_END_EVENT +} + +func (d *Decoder) document(rv reflect.Value) { + if d.event.event_type != yaml_DOCUMENT_START_EVENT { + d.error(fmt.Errorf("Expected document start at %s", d.event.start_mark)) + } + + 
d.nextEvent() + d.parse(rv) + + if d.event.event_type != yaml_DOCUMENT_END_EVENT { + d.error(fmt.Errorf("Expected document end at %s", d.event.start_mark)) + } + + d.nextEvent() +} + +func (d *Decoder) parse(rv reflect.Value) { + if !rv.IsValid() { + // skip ahead since we cannot store + d.valueInterface() + return + } + + anchor := string(d.event.anchor) + switch d.event.event_type { + case yaml_SEQUENCE_START_EVENT: + d.begin_anchor(anchor) + d.sequence(rv) + d.end_anchor(anchor) + case yaml_MAPPING_START_EVENT: + d.begin_anchor(anchor) + d.mapping(rv) + d.end_anchor(anchor) + case yaml_SCALAR_EVENT: + d.begin_anchor(anchor) + d.scalar(rv) + d.end_anchor(anchor) + case yaml_ALIAS_EVENT: + d.alias(rv) + case yaml_DOCUMENT_END_EVENT: + default: + d.error(&UnexpectedEventError{ + Value: string(d.event.value), + EventType: d.event.event_type, + At: d.event.start_mark, + }) + } +} + +func (d *Decoder) begin_anchor(anchor string) { + if anchor != "" { + events := []yaml_event_t{d.event} + d.tracking_anchors = append(d.tracking_anchors, events) + } +} + +func (d *Decoder) end_anchor(anchor string) { + if anchor != "" { + events := d.tracking_anchors[len(d.tracking_anchors)-1] + d.tracking_anchors = d.tracking_anchors[0 : len(d.tracking_anchors)-1] + // remove the anchor, replaying events shouldn't have anchors + events[0].anchor = nil + // we went one too many, remove the extra event + events = events[:len(events)-1] + // if nested, append to all the other anchors + for i, e := range d.tracking_anchors { + d.tracking_anchors[i] = append(e, events...) + } + d.anchors[anchor] = events + } +} + +func (d *Decoder) indirect(v reflect.Value, decodingNull bool) (Unmarshaler, reflect.Value) { + // If v is a named type and is addressable, + // start with its address, so that if the type has pointer methods, + // we find them. 
+ if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { + v = v.Addr() + } + for { + // Load value from interface, but only if the result will be + // usefully addressable. + if v.Kind() == reflect.Interface && !v.IsNil() { + e := v.Elem() + if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { + v = e + continue + } + } + + if v.Kind() != reflect.Ptr { + break + } + + if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { + break + } + + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + + if v.Type().NumMethod() > 0 { + if u, ok := v.Interface().(Unmarshaler); ok { + var temp interface{} + return u, reflect.ValueOf(&temp) + } + } + + v = v.Elem() + } + + return nil, v +} + +func (d *Decoder) sequence(v reflect.Value) { + if d.event.event_type != yaml_SEQUENCE_START_EVENT { + d.error(fmt.Errorf("Expected sequence start at %s", d.event.start_mark)) + } + + u, pv := d.indirect(v, false) + if u != nil { + defer func() { + if err := u.UnmarshalYAML(yaml_SEQ_TAG, pv.Interface()); err != nil { + d.error(err) + } + }() + _, pv = d.indirect(pv, false) + } + + v = pv + + // Check type of target. + switch v.Kind() { + case reflect.Interface: + if v.NumMethod() == 0 { + // Decoding into nil interface? Switch to non-reflect code. + v.Set(reflect.ValueOf(d.sequenceInterface())) + return + } + // Otherwise it's invalid. + fallthrough + default: + d.error(fmt.Errorf("Expected an array, slice or interface{} but was a %s at %s", v, d.event.start_mark)) + case reflect.Array: + case reflect.Slice: + break + } + + d.nextEvent() + + i := 0 +done: + for { + switch d.event.event_type { + case yaml_SEQUENCE_END_EVENT, yaml_DOCUMENT_END_EVENT: + break done + } + + // Get element of array, growing if necessary. 
+ if v.Kind() == reflect.Slice { + // Grow slice if necessary + if i >= v.Cap() { + newcap := v.Cap() + v.Cap()/2 + if newcap < 4 { + newcap = 4 + } + newv := reflect.MakeSlice(v.Type(), v.Len(), newcap) + reflect.Copy(newv, v) + v.Set(newv) + } + if i >= v.Len() { + v.SetLen(i + 1) + } + } + + if i < v.Len() { + // Decode into element. + d.parse(v.Index(i)) + } else { + // Ran out of fixed array: skip. + d.parse(reflect.Value{}) + } + i++ + } + + if i < v.Len() { + if v.Kind() == reflect.Array { + // Array. Zero the rest. + z := reflect.Zero(v.Type().Elem()) + for ; i < v.Len(); i++ { + v.Index(i).Set(z) + } + } else { + v.SetLen(i) + } + } + if i == 0 && v.Kind() == reflect.Slice { + v.Set(reflect.MakeSlice(v.Type(), 0, 0)) + } + + if d.event.event_type != yaml_DOCUMENT_END_EVENT { + d.nextEvent() + } +} + +func (d *Decoder) mapping(v reflect.Value) { + u, pv := d.indirect(v, false) + if u != nil { + defer func() { + if err := u.UnmarshalYAML(yaml_MAP_TAG, pv.Interface()); err != nil { + d.error(err) + } + }() + _, pv = d.indirect(pv, false) + } + v = pv + + // Decoding into nil interface? Switch to non-reflect code. 
+ if v.Kind() == reflect.Interface && v.NumMethod() == 0 { + v.Set(reflect.ValueOf(d.mappingInterface())) + return + } + + // Check type of target: struct or map[X]Y + switch v.Kind() { + case reflect.Struct: + d.mappingStruct(v) + return + case reflect.Map: + default: + d.error(fmt.Errorf("Expected a struct or map but was a %s at %s ", v, d.event.start_mark)) + } + + mapt := v.Type() + if v.IsNil() { + v.Set(reflect.MakeMap(mapt)) + } + + d.nextEvent() + + keyt := mapt.Key() + mapElemt := mapt.Elem() + + var mapElem reflect.Value +done: + for { + switch d.event.event_type { + case yaml_MAPPING_END_EVENT: + break done + case yaml_DOCUMENT_END_EVENT: + return + } + + key := reflect.New(keyt) + d.parse(key.Elem()) + + if !mapElem.IsValid() { + mapElem = reflect.New(mapElemt).Elem() + } else { + mapElem.Set(reflect.Zero(mapElemt)) + } + + d.parse(mapElem) + + v.SetMapIndex(key.Elem(), mapElem) + } + + d.nextEvent() +} + +func (d *Decoder) mappingStruct(v reflect.Value) { + + structt := v.Type() + fields := cachedTypeFields(structt) + + d.nextEvent() + +done: + for { + switch d.event.event_type { + case yaml_MAPPING_END_EVENT: + break done + case yaml_DOCUMENT_END_EVENT: + return + } + + key := "" + d.parse(reflect.ValueOf(&key)) + + // Figure out field corresponding to key. 
+ var subv reflect.Value + + var f *field + for i := range fields { + ff := &fields[i] + if ff.name == key { + f = ff + break + } + + if f == nil && strings.EqualFold(ff.name, key) { + f = ff + } + } + + if f != nil { + subv = v + for _, i := range f.index { + if subv.Kind() == reflect.Ptr { + if subv.IsNil() { + subv.Set(reflect.New(subv.Type().Elem())) + } + subv = subv.Elem() + } + subv = subv.Field(i) + } + } + d.parse(subv) + } + + d.nextEvent() +} + +func (d *Decoder) scalar(v reflect.Value) { + val := string(d.event.value) + wantptr := null_values[val] + + u, pv := d.indirect(v, wantptr) + + var tag string + if u != nil { + defer func() { + if err := u.UnmarshalYAML(tag, pv.Interface()); err != nil { + d.error(err) + } + }() + + _, pv = d.indirect(pv, wantptr) + } + v = pv + + var err error + tag, err = resolve(d.event, v, d.useNumber) + if err != nil { + d.error(err) + } + + d.nextEvent() +} + +func (d *Decoder) alias(rv reflect.Value) { + val, ok := d.anchors[string(d.event.anchor)] + if !ok { + d.error(fmt.Errorf("missing anchor: '%s' at %s", d.event.anchor, d.event.start_mark)) + } + + d.replay_events = val + d.nextEvent() + d.parse(rv) +} + +func (d *Decoder) valueInterface() interface{} { + var v interface{} + + anchor := string(d.event.anchor) + switch d.event.event_type { + case yaml_SEQUENCE_START_EVENT: + d.begin_anchor(anchor) + v = d.sequenceInterface() + case yaml_MAPPING_START_EVENT: + d.begin_anchor(anchor) + v = d.mappingInterface() + case yaml_SCALAR_EVENT: + d.begin_anchor(anchor) + v = d.scalarInterface() + case yaml_ALIAS_EVENT: + rv := reflect.ValueOf(&v) + d.alias(rv) + return v + case yaml_DOCUMENT_END_EVENT: + d.error(&UnexpectedEventError{ + Value: string(d.event.value), + EventType: d.event.event_type, + At: d.event.start_mark, + }) + + } + d.end_anchor(anchor) + + return v +} + +func (d *Decoder) scalarInterface() interface{} { + _, v := resolveInterface(d.event, d.useNumber) + + d.nextEvent() + return v +} + +// arrayInterface is 
like array but returns []interface{}. +func (d *Decoder) sequenceInterface() []interface{} { + var v = make([]interface{}, 0) + + d.nextEvent() + +done: + for { + switch d.event.event_type { + case yaml_SEQUENCE_END_EVENT, yaml_DOCUMENT_END_EVENT: + break done + } + + v = append(v, d.valueInterface()) + } + + if d.event.event_type != yaml_DOCUMENT_END_EVENT { + d.nextEvent() + } + + return v +} + +// objectInterface is like object but returns map[string]interface{}. +func (d *Decoder) mappingInterface() map[interface{}]interface{} { + m := make(map[interface{}]interface{}) + + d.nextEvent() + +done: + for { + switch d.event.event_type { + case yaml_MAPPING_END_EVENT, yaml_DOCUMENT_END_EVENT: + break done + } + + key := d.valueInterface() + + // Read value. + m[key] = d.valueInterface() + } + + if d.event.event_type != yaml_DOCUMENT_END_EVENT { + d.nextEvent() + } + + return m +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode_test.go new file mode 100644 index 0000000..6ba1275 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/decode_test.go @@ -0,0 +1,906 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "math" + "os" + "strconv" + "strings" + "time" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Decode", func() { + It("Decodes a file", func() { + f, _ := os.Open("fixtures/specification/example2_1.yaml") + d := NewDecoder(f) + var v interface{} + err := d.Decode(&v) + + Expect(err).NotTo(HaveOccurred()) + }) + + Context("strings", func() { + It("Decodes an empty string", func() { + d := NewDecoder(strings.NewReader(`"" +`)) + var v string + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal("")) + }) + + It("Decodes an empty string to an interface", func() { + d := NewDecoder(strings.NewReader(`"" +`)) + var v interface{} + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal("")) + }) + + It("Decodes a map containing empty strings to an interface", func() { + d := NewDecoder(strings.NewReader(`"" : "" +`)) + var v interface{} + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[interface{}]interface{}{"": ""})) + }) + }) + + Context("Sequence", func() { + It("Decodes to interface{}s", func() { + f, _ := os.Open("fixtures/specification/example2_1.yaml") + d := NewDecoder(f) + var v interface{} + err := d.Decode(&v) + + Expect(err).NotTo(HaveOccurred()) + Expect((v).([]interface{})).To(Equal([]interface{}{"Mark McGwire", "Sammy Sosa", "Ken Griffey"})) + }) + + It("Decodes to []string", func() { + f, _ := os.Open("fixtures/specification/example2_1.yaml") + d := NewDecoder(f) + v := make([]string, 0, 3) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]string{"Mark McGwire", "Sammy Sosa", "Ken Griffey"})) + }) + + It("Decodes a sequence of maps", func() { + f, _ := os.Open("fixtures/specification/example2_12.yaml") + d := NewDecoder(f) + v := make([]map[string]interface{}, 1) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]map[string]interface{}{ + {"item": "Super Hoop", "quantity": int64(1)}, + {"item": "Basketball", "quantity": int64(4)}, + {"item": "Big Shoes", 
"quantity": int64(1)}, + })) + + }) + + Describe("As structs", func() { + It("Simple struct", func() { + f, _ := os.Open("fixtures/specification/example2_4.yaml") + d := NewDecoder(f) + + type batter struct { + Name string + HR int64 + AVG float64 + } + v := make([]batter, 0, 1) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]batter{ + batter{Name: "Mark McGwire", HR: 65, AVG: 0.278}, + batter{Name: "Sammy Sosa", HR: 63, AVG: 0.288}, + })) + + }) + + It("Tagged struct", func() { + f, _ := os.Open("fixtures/specification/example2_4.yaml") + d := NewDecoder(f) + + type batter struct { + N string `yaml:"name"` + H int64 `yaml:"hr"` + A float64 `yaml:"avg"` + } + v := make([]batter, 0, 1) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]batter{ + batter{N: "Mark McGwire", H: 65, A: 0.278}, + batter{N: "Sammy Sosa", H: 63, A: 0.288}, + })) + + }) + + It("handles null values", func() { + type S struct { + Default interface{} + } + + d := NewDecoder(strings.NewReader(` +--- +default: +`)) + var s S + err := d.Decode(&s) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(Equal(S{Default: nil})) + + }) + + It("ignores missing tags", func() { + f, _ := os.Open("fixtures/specification/example2_4.yaml") + d := NewDecoder(f) + + type batter struct { + N string `yaml:"name"` + HR int64 + A float64 + } + v := make([]batter, 0, 1) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]batter{ + batter{N: "Mark McGwire", HR: 65}, + batter{N: "Sammy Sosa", HR: 63}, + })) + + }) + }) + + It("Decodes a sequence of sequences", func() { + f, _ := os.Open("fixtures/specification/example2_5.yaml") + d := NewDecoder(f) + v := make([][]interface{}, 1) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([][]interface{}{ + {"name", "hr", "avg"}, + {"Mark McGwire", int64(65), float64(0.278)}, + {"Sammy Sosa", int64(63), float64(0.288)}, + })) + + }) + }) + + 
Context("Maps", func() { + It("Decodes to interface{}s", func() { + f, _ := os.Open("fixtures/specification/example2_2.yaml") + d := NewDecoder(f) + var v interface{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect((v).(map[interface{}]interface{})).To(Equal(map[interface{}]interface{}{ + "hr": int64(65), + "avg": float64(0.278), + "rbi": int64(147), + })) + + }) + + It("Decodes to a struct", func() { + f, _ := os.Open("fixtures/specification/example2_2.yaml") + d := NewDecoder(f) + + type batter struct { + HR int64 + AVG float64 + RBI int64 + } + v := batter{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(batter{HR: 65, AVG: 0.278, RBI: 147})) + }) + + It("Decodes to a map of string arrays", func() { + f, _ := os.Open("fixtures/specification/example2_9.yaml") + d := NewDecoder(f) + v := make(map[string][]string) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string][]string{"hr": []string{"Mark McGwire", "Sammy Sosa"}, "rbi": []string{"Sammy Sosa", "Ken Griffey"}})) + }) + }) + + Context("Sequence of Maps", func() { + It("Decodes to interface{}s", func() { + f, _ := os.Open("fixtures/specification/example2_4.yaml") + d := NewDecoder(f) + var v interface{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect((v).([]interface{})).To(Equal([]interface{}{ + map[interface{}]interface{}{"name": "Mark McGwire", "hr": int64(65), "avg": float64(0.278)}, + map[interface{}]interface{}{"name": "Sammy Sosa", "hr": int64(63), "avg": float64(0.288)}, + })) + + }) + }) + + It("Decodes ascii art", func() { + f, _ := os.Open("fixtures/specification/example2_13.yaml") + d := NewDecoder(f) + v := "" + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(`\//||\/|| +// || ||__ +`)) + + }) + + It("Decodes folded strings", func() { + f, _ := os.Open("fixtures/specification/example2_15.yaml") + d := NewDecoder(f) + v := "" + + err := d.Decode(&v) + 
Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal("Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n")) + }) + + It("Decodes literal and folded strings with indents", func() { + f, _ := os.Open("fixtures/specification/example2_16.yaml") + d := NewDecoder(f) + v := make(map[string]string) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]string{ + "name": "Mark McGwire", + "accomplishment": `Mark set a major league home run record in 1998. +`, + "stats": `65 Home Runs +0.278 Batting Average +`, + })) + + }) + + It("Decodes single quoted", func() { + f, _ := os.Open("fixtures/specification/example2_17_quoted.yaml") + d := NewDecoder(f) + v := make(map[string]string) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]string{ + "quoted": ` # not a 'comment'.`, + })) + + }) + + Context("ints", func() { + It("Decodes into an interface{}", func() { + f, _ := os.Open("fixtures/specification/example2_19.yaml") + d := NewDecoder(f) + v := make(map[string]interface{}) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "canonical": int64(12345), + "decimal": int64(12345), + "octal": int64(12), + "hexadecimal": int64(12), + })) + + }) + + It("Decodes into int64", func() { + f, _ := os.Open("fixtures/specification/example2_19.yaml") + d := NewDecoder(f) + v := make(map[string]int64) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]int64{ + "canonical": int64(12345), + "decimal": int64(12345), + "octal": int64(12), + "hexadecimal": int64(12), + })) + + }) + + Context("boundary values", func() { + intoInt64 := func(val int64) { + It("Decodes into an int64 value", func() { + var v int64 + + d := NewDecoder(strings.NewReader(strconv.FormatInt(val, 10))) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + 
Expect(v).To(Equal(val)) + + }) + } + + intoInt := func(val int) { + It("Decodes into an int value", func() { + var v int + + d := NewDecoder(strings.NewReader(strconv.FormatInt(int64(val), 10))) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(val)) + + }) + } + + intoInterface := func(val int64) { + It("Decodes into an interface{}", func() { + var v interface{} + + d := NewDecoder(strings.NewReader(strconv.FormatInt(val, 10))) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(val)) + }) + } + + intoInt64(math.MaxInt64) + intoInterface(math.MaxInt64) + + intoInt64(math.MinInt64) + intoInterface(math.MinInt64) + + intoInt(math.MaxInt32) + intoInt(math.MinInt32) + }) + }) + + It("Decodes a variety of floats", func() { + f, _ := os.Open("fixtures/specification/example2_20.yaml") + d := NewDecoder(f) + v := make(map[string]float64) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + + Expect(math.IsNaN(v["not a number"])).To(BeTrue()) + delete(v, "not a number") + + Expect(v).To(Equal(map[string]float64{ + "canonical": float64(1230.15), + "exponential": float64(1230.15), + "fixed": float64(1230.15), + "negative infinity": math.Inf(-1), + })) + + }) + + It("Decodes booleans, nil and strings", func() { + f, _ := os.Open("fixtures/specification/example2_21.yaml") + d := NewDecoder(f) + v := make(map[string]interface{}) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "": interface{}(nil), + "true": true, + "false": false, + "string": "12345", + })) + + }) + + It("Decodes a null ptr", func() { + d := NewDecoder(strings.NewReader(`null +`)) + var v *bool + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(BeNil()) + }) + + It("Decodes dates/time", func() { + f, _ := os.Open("fixtures/specification/example2_22.yaml") + d := NewDecoder(f) + v := make(map[string]time.Time) + + err := d.Decode(&v) + 
Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]time.Time{ + "canonical": time.Date(2001, time.December, 15, 2, 59, 43, int(1*time.Millisecond), time.UTC), + "iso8601": time.Date(2001, time.December, 14, 21, 59, 43, int(10*time.Millisecond), time.FixedZone("", -5*3600)), + "spaced": time.Date(2001, time.December, 14, 21, 59, 43, int(10*time.Millisecond), time.FixedZone("", -5*3600)), + "date": time.Date(2002, time.December, 14, 0, 0, 0, 0, time.UTC), + })) + + }) + + Context("Tags", func() { + It("Respects tags", func() { + f, _ := os.Open("fixtures/specification/example2_23_non_date.yaml") + d := NewDecoder(f) + v := make(map[string]string) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]string{ + "not-date": "2002-04-28", + })) + + }) + + It("handles non-specific tags", func() { + d := NewDecoder(strings.NewReader(` +--- +not_parsed: ! 123 +`)) + v := make(map[string]int) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]int{"not_parsed": 123})) + }) + + It("handles non-specific tags", func() { + d := NewDecoder(strings.NewReader(` +--- +? a complex key +: ! 
"123" +`)) + v := make(map[string]string) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]string{"a complex key": "123"})) + }) + }) + + Context("Decodes binary/base64", func() { + It("to []byte", func() { + f, _ := os.Open("fixtures/specification/example2_23_picture.yaml") + d := NewDecoder(f) + v := make(map[string][]byte) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string][]byte{ + "picture": []byte{0x47, 0x49, 0x46, 0x38, 0x39, 0x61, 0x0c, 0x00, + 0x0c, 0x00, 0x84, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xf5, 0xf5, 0xee, + 0xe9, 0xe9, 0xe5, 0x66, 0x66, 0x66, 0x00, 0x00, 0x00, 0xe7, 0xe7, + 0xe7, 0x5e, 0x5e, 0x5e, 0xf3, 0xf3, 0xed, 0x8e, 0x8e, 0x8e, 0xe0, + 0xe0, 0xe0, 0x9f, 0x9f, 0x9f, 0x93, 0x93, 0x93, 0xa7, 0xa7, 0xa7, + 0x9e, 0x9e, 0x9e, 0x69, 0x5e, 0x10, 0x27, 0x20, 0x82, 0x0a, 0x01, + 0x00, 0x3b}, + })) + + }) + + It("to string", func() { + d := NewDecoder(strings.NewReader("!binary YWJjZGVmZw==")) + var v string + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal("abcdefg")) + }) + + It("to string via alternate form", func() { + d := NewDecoder(strings.NewReader("!!binary YWJjZGVmZw==")) + var v string + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal("abcdefg")) + }) + + It("to interface", func() { + d := NewDecoder(strings.NewReader("!binary YWJjZGVmZw==")) + var v interface{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal([]byte("abcdefg"))) + }) + }) + + Context("Aliases", func() { + Context("to known types", func() { + It("aliases scalars", func() { + f, _ := os.Open("fixtures/specification/example2_10.yaml") + d := NewDecoder(f) + v := make(map[string][]string) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string][]string{ + "hr": {"Mark McGwire", "Sammy Sosa"}, + "rbi": {"Sammy Sosa", "Ken Griffey"}, + })) + + }) + + It("aliases sequences", 
func() { + d := NewDecoder(strings.NewReader(` +--- +hr: &ss + - MG + - SS +rbi: *ss +`)) + v := make(map[string][]string) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string][]string{ + "hr": {"MG", "SS"}, + "rbi": {"MG", "SS"}, + })) + + }) + + It("aliases maps", func() { + d := NewDecoder(strings.NewReader(` +--- +hr: &ss + MG : SS +rbi: *ss +`)) + v := make(map[string]map[string]string) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]map[string]string{ + "hr": {"MG": "SS"}, + "rbi": {"MG": "SS"}, + })) + + }) + }) + + It("aliases to different types", func() { + type S struct { + A map[string]int + C map[string]string + } + d := NewDecoder(strings.NewReader(` +--- +a: &map + b : 1 +c: *map +`)) + var s S + err := d.Decode(&s) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(Equal(S{ + A: map[string]int{"b": 1}, + C: map[string]string{"b": "1"}, + })) + + }) + + It("fails if an anchor is undefined", func() { + d := NewDecoder(strings.NewReader(` +--- +a: *missing +`)) + m := make(map[string]string) + err := d.Decode(&m) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(MatchRegexp("missing anchor.*line.*column.*")) + }) + + Context("to Interface", func() { + It("aliases scalars", func() { + f, _ := os.Open("fixtures/specification/example2_10.yaml") + d := NewDecoder(f) + v := make(map[string]interface{}) + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "hr": []interface{}{"Mark McGwire", "Sammy Sosa"}, + "rbi": []interface{}{"Sammy Sosa", "Ken Griffey"}, + })) + + }) + + It("aliases sequences", func() { + d := NewDecoder(strings.NewReader(` +--- +hr: &ss + - MG + - SS +rbi: *ss +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "hr": []interface{}{"MG", "SS"}, + "rbi": []interface{}{"MG", "SS"}, + })) + + }) + + It("aliases 
maps", func() { + d := NewDecoder(strings.NewReader(` +--- +hr: &ss + MG : SS +rbi: *ss +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "hr": map[interface{}]interface{}{"MG": "SS"}, + "rbi": map[interface{}]interface{}{"MG": "SS"}, + })) + + }) + + It("supports duplicate aliases", func() { + d := NewDecoder(strings.NewReader(` +--- +a: &a + b: 1 +x: *a +y: *a +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "a": map[interface{}]interface{}{"b": int64(1)}, + "x": map[interface{}]interface{}{"b": int64(1)}, + "y": map[interface{}]interface{}{"b": int64(1)}, + })) + + }) + + It("supports overriden anchors", func() { + d := NewDecoder(strings.NewReader(` +--- +First occurrence: &anchor Foo +Second occurrence: *anchor +Override anchor: &anchor Bar +Reuse anchor: *anchor +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "First occurrence": "Foo", + "Second occurrence": "Foo", + "Override anchor": "Bar", + "Reuse anchor": "Bar", + })) + + }) + + It("fails if an anchor is undefined", func() { + d := NewDecoder(strings.NewReader(` +--- +a: *missing +`)) + var i interface{} + err := d.Decode(&i) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(MatchRegexp("missing anchor.*line.*column.*")) + }) + + }) + + It("supports composing aliases", func() { + d := NewDecoder(strings.NewReader(` +--- +a: &a b +x: &b + d: *a +z: *b +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "a": "b", + "x": map[interface{}]interface{}{"d": "b"}, + "z": map[interface{}]interface{}{"d": "b"}, + })) + + }) + + It("redefinition while composing aliases", func() { + d := NewDecoder(strings.NewReader(` +--- +a: &a 
b +x: &c + d : &a 1 +y: *a +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(map[string]interface{}{ + "a": "b", + "x": map[interface{}]interface{}{"d": int64(1)}, + "y": int64(1), + })) + + }) + + It("can parse nested anchors", func() { + d := NewDecoder(strings.NewReader(` +--- +a: + aa: &x + aaa: 1 + ab: + aba: &y + abaa: + abaaa: *x +b: +- ba: + baa: *y +`)) + v := make(map[string]interface{}) + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + }) + }) + + Context("When decoding fails", func() { + It("returns an error", func() { + f, _ := os.Open("fixtures/specification/example_empty.yaml") + d := NewDecoder(f) + var v interface{} + + err := d.Decode(&v) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Expected document start at line 0, column 0")) + }) + }) + + Context("Unmarshaler support", func() { + Context("Receiver is a value", func() { + It("the Marshaler interface is not used", func() { + d := NewDecoder(strings.NewReader("abc\n")) + v := hasMarshaler{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.Value).To(BeNil()) + }) + }) + + Context("Receiver is a pointer", func() { + It("uses the Marshaler interface when a pointer", func() { + d := NewDecoder(strings.NewReader("abc\n")) + v := hasPtrMarshaler{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + }) + + It("marshals a scalar", func() { + d := NewDecoder(strings.NewReader("abc\n")) + v := hasPtrMarshaler{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.Tag).To(Equal(yaml_STR_TAG)) + Expect(v.Value).To(Equal("abc")) + }) + + It("marshals a sequence", func() { + d := NewDecoder(strings.NewReader("[abc, def]\n")) + v := hasPtrMarshaler{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.Tag).To(Equal(yaml_SEQ_TAG)) + Expect(v.Value).To(Equal([]interface{}{"abc", "def"})) + }) + + It("marshals a map", func() { + d := 
NewDecoder(strings.NewReader("{ a: bc}\n")) + v := hasPtrMarshaler{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.Tag).To(Equal(yaml_MAP_TAG)) + Expect(v.Value).To(Equal(map[interface{}]interface{}{"a": "bc"})) + }) + }) + }) + + Context("Marshals into a Number", func() { + It("when the number is an int", func() { + d := NewDecoder(strings.NewReader("123\n")) + d.UseNumber() + var v Number + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.String()).To(Equal("123")) + }) + + It("when the number is an float", func() { + d := NewDecoder(strings.NewReader("1.23\n")) + d.UseNumber() + var v Number + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v.String()).To(Equal("1.23")) + }) + + It("it fails when its a non-Number", func() { + d := NewDecoder(strings.NewReader("on\n")) + d.UseNumber() + var v Number + + err := d.Decode(&v) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(MatchRegexp("Not a number: 'on' at line 0, column 0")) + }) + + It("returns a Number", func() { + d := NewDecoder(strings.NewReader("123\n")) + d.UseNumber() + var v interface{} + + err := d.Decode(&v) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(BeAssignableToTypeOf(Number(""))) + + n := v.(Number) + Expect(n.String()).To(Equal("123")) + }) + }) + Context("When there are special characters", func() { + It("returns an error", func() { + d := NewDecoder(strings.NewReader(` +--- +applications: + - name: m + services: + - !@# +`)) + var v interface{} + + err := d.Decode(&v) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(MatchRegexp("yaml.*did not find.*line.*column.*")) + }) + }) +}) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/emitter.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/emitter.go new file mode 100644 index 0000000..a42df05 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/emitter.go @@ -0,0 +1,2072 @@ +/* +Licensed under the Apache License, 
Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "bytes" +) + +var default_tag_directives = []yaml_tag_directive_t{ + {[]byte("!"), []byte("!")}, + {[]byte("!!"), []byte("tag:yaml.org,2002:")}, +} + +/* + * Flush the buffer if needed. + */ + +func flush(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) { + return yaml_emitter_flush(emitter) + } + return true +} + +/* + * Put a character to the output buffer. + */ +func put(emitter *yaml_emitter_t, value byte) bool { + if !flush(emitter) { + return false + } + + emitter.buffer[emitter.buffer_pos] = value + emitter.buffer_pos++ + emitter.column++ + return true +} + +/* + * Put a line break to the output buffer. + */ + +func put_break(emitter *yaml_emitter_t) bool { + if !flush(emitter) { + return false + } + switch emitter.line_break { + case yaml_CR_BREAK: + emitter.buffer[emitter.buffer_pos] = '\r' + emitter.buffer_pos++ + case yaml_LN_BREAK: + emitter.buffer[emitter.buffer_pos] = '\n' + emitter.buffer_pos++ + case yaml_CRLN_BREAK: + emitter.buffer[emitter.buffer_pos] = '\r' + emitter.buffer[emitter.buffer_pos] = '\n' + emitter.buffer_pos += 2 + default: + return false + } + emitter.column = 0 + emitter.line++ + return true +} + +/* + * Copy a character from a string into buffer. 
+ */ +func write(emitter *yaml_emitter_t, src []byte, src_pos *int) bool { + if !flush(emitter) { + return false + } + copy_bytes(emitter.buffer, &emitter.buffer_pos, src, src_pos) + emitter.column++ + return true +} + +/* + * Copy a line break character from a string into buffer. + */ + +func write_break(emitter *yaml_emitter_t, src []byte, src_pos *int) bool { + if src[*src_pos] == '\n' { + if !put_break(emitter) { + return false + } + *src_pos++ + } else { + if !write(emitter, src, src_pos) { + return false + } + emitter.column = 0 + emitter.line++ + } + + return true +} + +/* + * Set an emitter error and return 0. + */ + +func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_EMITTER_ERROR + emitter.problem = problem + return false +} + +/* + * Emit an event. + */ + +func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.events = append(emitter.events, *event) + for !yaml_emitter_need_more_events(emitter) { + event := &emitter.events[emitter.events_head] + if !yaml_emitter_analyze_event(emitter, event) { + return false + } + if !yaml_emitter_state_machine(emitter, event) { + return false + } + yaml_event_delete(event) + emitter.events_head++ + } + return true +} + +/* + * Check if we need to accumulate more events before emitting. 
+ * + * We accumulate extra + * - 1 event for DOCUMENT-START + * - 2 events for SEQUENCE-START + * - 3 events for MAPPING-START + */ + +func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { + if emitter.events_head == len(emitter.events) { + return true + } + + accumulate := 0 + switch emitter.events[emitter.events_head].event_type { + case yaml_DOCUMENT_START_EVENT: + accumulate = 1 + case yaml_SEQUENCE_START_EVENT: + accumulate = 2 + case yaml_MAPPING_START_EVENT: + accumulate = 3 + default: + return false + } + + if len(emitter.events)-emitter.events_head > accumulate { + return false + } + + level := 0 + for i := emitter.events_head; i < len(emitter.events); i++ { + switch emitter.events[i].event_type { + case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: + level++ + case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: + level-- + } + + if level == 0 { + return false + } + } + return true +} + +/* + * Append a directive to the directives stack. + */ + +func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, + value *yaml_tag_directive_t, allow_duplicates bool) bool { + + for i := range emitter.tag_directives { + + if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_emitter_set_emitter_error(emitter, "duplicat %TAG directive") + } + } + + tag_copy := yaml_tag_directive_t{ + handle: value.handle, + prefix: value.prefix, + } + + emitter.tag_directives = append(emitter.tag_directives, tag_copy) + + return true +} + +/* + * Increase the indentation level. 
+ */ + +func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow bool, indentless bool) bool { + + emitter.indents = append(emitter.indents, emitter.indent) + + if emitter.indent < 0 { + if flow { + emitter.indent = emitter.best_indent + } else { + emitter.indent = 0 + } + } else if !indentless { + emitter.indent += emitter.best_indent + } + + return true +} + +/* + * State dispatcher. + */ + +func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { + switch emitter.state { + case yaml_EMIT_STREAM_START_STATE: + return yaml_emitter_emit_stream_start(emitter, event) + + case yaml_EMIT_FIRST_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, true) + + case yaml_EMIT_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, false) + + case yaml_EMIT_DOCUMENT_CONTENT_STATE: + return yaml_emitter_emit_document_content(emitter, event) + + case yaml_EMIT_DOCUMENT_END_STATE: + return yaml_emitter_emit_document_end(emitter, event) + + case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, true) + + case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, false) + + case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, true) + + case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, false) + + case 
yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, false) + + case yaml_EMIT_END_STATE: + return yaml_emitter_set_emitter_error(emitter, + "expected nothing after STREAM-END") + + } + + panic("invalid state") +} + +/* + * Expect STREAM-START. + */ + +func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + if event.event_type != yaml_STREAM_START_EVENT { + return yaml_emitter_set_emitter_error(emitter, + "expected STREAM-START") + } + + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = event.encoding + + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = yaml_UTF8_ENCODING + } + } + + if emitter.best_indent < 2 || emitter.best_indent > 9 { + emitter.best_indent = 2 + } + + if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { + emitter.best_width = 80 + } + + if emitter.best_width < 0 { + emitter.best_width = 1<<31 - 1 + } + + if emitter.line_break == yaml_ANY_BREAK { + emitter.line_break = yaml_LN_BREAK + } + + emitter.indent = -1 + + emitter.line = 0 + emitter.column = 0 + emitter.whitespace = true + emitter.indention = true + + if emitter.encoding != yaml_UTF8_ENCODING { + if !yaml_emitter_write_bom(emitter) { + return false + } + } + + emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE + + return true +} + +/* + * Expect DOCUMENT-START or STREAM-END. 
+ */ + +func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, + event *yaml_event_t, first bool) bool { + + if event.event_type == yaml_DOCUMENT_START_EVENT { + if event.version_directive != nil { + if !yaml_emitter_analyze_version_directive(emitter, + *event.version_directive) { + return false + } + } + + for i := range event.tag_directives { + tag_directive := &event.tag_directives[i] + + if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { + return false + } + if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { + return false + } + } + + for i := range default_tag_directives { + if !yaml_emitter_append_tag_directive(emitter, &default_tag_directives[i], true) { + return false + } + } + + implicit := event.implicit + if !first || emitter.canonical { + implicit = false + } + + if (event.version_directive != nil || len(event.tag_directives) > 0) && + emitter.open_ended { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if event.version_directive != nil { + implicit = false + if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { + return false + } + + if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { + return false + } + + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if len(event.tag_directives) > 0 { + implicit = false + for i := range event.tag_directives { + tag_directive := &event.tag_directives[i] + + if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { + return false + } + if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + if yaml_emitter_check_empty_document(emitter) { + implicit = false + } + + if 
!implicit { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { + return false + } + + if emitter.canonical { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE + + return true + } else if event.event_type == yaml_STREAM_END_EVENT { + if emitter.open_ended { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if !yaml_emitter_flush(emitter) { + return false + } + + emitter.state = yaml_EMIT_END_STATE + + return true + } + + return yaml_emitter_set_emitter_error(emitter, + "expected DOCUMENT-START or STREAM-END") +} + +/* + * Expect the root node. + */ + +func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) + + return yaml_emitter_emit_node(emitter, event, true, false, false, false) +} + +/* + * Expect DOCUMENT-END. + */ + +func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + if event.event_type != yaml_DOCUMENT_END_EVENT { + return yaml_emitter_set_emitter_error(emitter, + "expected DOCUMENT-END") + } + + if !yaml_emitter_write_indent(emitter) { + return false + } + if !event.implicit { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + + emitter.state = yaml_EMIT_DOCUMENT_START_STATE + emitter.tag_directives = emitter.tag_directives[:0] + return true +} + +/* + * + * Expect a flow item node. 
+ */ + +func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte("["), true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.event_type == yaml_SEQUENCE_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte(","), false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte("]"), false, false, false) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte(","), false, false, false) { + return false + } + } + + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +/* + * Expect a flow key node. 
+ */ + +func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, + event *yaml_event_t, first bool) bool { + + if first { + + if !yaml_emitter_write_indicator(emitter, []byte("{"), true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.event_type == yaml_MAPPING_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte(","), false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte("}"), false, false, false) { + return false + } + + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte(","), false, false, false) { + return false + } + } + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } else { + if !yaml_emitter_write_indicator(emitter, []byte("?"), true, false, false) { + return false + } + + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) + } +} + +/* + * Expect a flow value node. 
+ */ + +func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, + event *yaml_event_t, simple bool) bool { + + if simple { + if !yaml_emitter_write_indicator(emitter, []byte(":"), false, false, false) { + return false + } + } else { + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte(":"), true, false, false) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +/* + * Expect a block item node. + */ + +func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, + event *yaml_event_t, first bool) bool { + + if first { + if !yaml_emitter_increase_indent(emitter, false, + (emitter.mapping_context && !emitter.indention)) { + return false + } + } + + if event.event_type == yaml_SEQUENCE_END_EVENT { + + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("-"), true, false, true) { + return false + } + + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +/* + * Expect a block key node. 
+ */ + +func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, + event *yaml_event_t, first bool) bool { + + if first { + if !yaml_emitter_increase_indent(emitter, false, false) { + return false + } + } + + if event.event_type == yaml_MAPPING_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !yaml_emitter_write_indent(emitter) { + return false + } + + if yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) + + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } else { + if !yaml_emitter_write_indicator(emitter, []byte("?"), true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) + + return yaml_emitter_emit_node(emitter, event, false, false, true, false) + } +} + +/* + * Expect a block value node. + */ + +func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, + event *yaml_event_t, simple bool) bool { + + if simple { + if !yaml_emitter_write_indicator(emitter, []byte(":"), false, false, false) { + return false + } + } else { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte(":"), true, false, true) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) + + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +/* + * Expect a node. 
+ */ + +func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, + root bool, sequence bool, mapping bool, simple_key bool) bool { + emitter.root_context = root + emitter.sequence_context = sequence + emitter.mapping_context = mapping + emitter.simple_key_context = simple_key + + switch event.event_type { + case yaml_ALIAS_EVENT: + return yaml_emitter_emit_alias(emitter, event) + + case yaml_SCALAR_EVENT: + return yaml_emitter_emit_scalar(emitter, event) + + case yaml_SEQUENCE_START_EVENT: + return yaml_emitter_emit_sequence_start(emitter, event) + + case yaml_MAPPING_START_EVENT: + return yaml_emitter_emit_mapping_start(emitter, event) + + default: + return yaml_emitter_set_emitter_error(emitter, + "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS") + } + + return false +} + +/* + * Expect ALIAS. + */ + +func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true +} + +/* + * Expect SCALAR. + */ + +func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_select_scalar_style(emitter, event) { + return false + } + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + if !yaml_emitter_process_scalar(emitter) { + return false + } + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true +} + +/* + * Expect SEQUENCE-START. 
+ */ + +func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + + if emitter.flow_level > 0 || emitter.canonical || + event.style == yaml_style_t(yaml_FLOW_SEQUENCE_STYLE) || + yaml_emitter_check_empty_sequence(emitter) { + emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE + } + + return true +} + +/* + * Expect MAPPING-START. + */ + +func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + + if emitter.flow_level > 0 || emitter.canonical || + event.style == yaml_style_t(yaml_FLOW_MAPPING_STYLE) || + yaml_emitter_check_empty_mapping(emitter) { + emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE + } + + return true +} + +/* + * Check if the document content is an empty scalar. + */ + +func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { + return false +} + +/* + * Check if the next events represent an empty sequence. + */ + +func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + + return (emitter.events[emitter.events_head].event_type == yaml_SEQUENCE_START_EVENT && + emitter.events[emitter.events_head+1].event_type == yaml_SEQUENCE_END_EVENT) +} + +/* + * Check if the next events represent an empty mapping. 
+ */ + +func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + + return (emitter.events[emitter.events_head].event_type == yaml_MAPPING_START_EVENT && + emitter.events[emitter.events_head+1].event_type == yaml_MAPPING_END_EVENT) +} + +/* + * Check if the next node can be expressed as a simple key. + */ + +func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { + length := 0 + + switch emitter.events[emitter.events_head].event_type { + case yaml_ALIAS_EVENT: + length += len(emitter.anchor_data.anchor) + + case yaml_SCALAR_EVENT: + if emitter.scalar_data.multiline { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + + len(emitter.scalar_data.value) + + case yaml_SEQUENCE_START_EVENT: + if !yaml_emitter_check_empty_sequence(emitter) { + return false + } + + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + + case yaml_MAPPING_START_EVENT: + if !yaml_emitter_check_empty_mapping(emitter) { + return false + } + + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + + default: + return false + } + + if length > 128 { + return false + } + + return true +} + +/* + * Determine an acceptable scalar style. 
+ */
+
+func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+	no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
+
+	if no_tag && !event.implicit && !event.quoted_implicit {
+		return yaml_emitter_set_emitter_error(emitter,
+			"neither tag nor implicit flags are specified")
+	}
+
+	style := yaml_scalar_style_t(event.style)
+
+	if style == yaml_ANY_SCALAR_STYLE {
+		style = yaml_PLAIN_SCALAR_STYLE
+	}
+
+	if emitter.canonical {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+
+	if emitter.simple_key_context && emitter.scalar_data.multiline {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+
+	if style == yaml_PLAIN_SCALAR_STYLE {
+		if (emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed) ||
+			(emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed) {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if len(emitter.scalar_data.value) == 0 &&
+			(emitter.flow_level > 0 || emitter.simple_key_context) {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if no_tag && !event.implicit {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+	}
+
+	if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
+		if !emitter.scalar_data.single_quoted_allowed {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+
+	if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
+		if !emitter.scalar_data.block_allowed ||
+			emitter.flow_level > 0 || emitter.simple_key_context {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+
+	if no_tag && !event.quoted_implicit &&
+		style != yaml_PLAIN_SCALAR_STYLE {
+		emitter.tag_data.handle = []byte("!")
+	}
+
+	emitter.scalar_data.style = style
+
+	return true
+}
+
+/*
+ * Write an anchor.
+ */ + +func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { + if emitter.anchor_data.anchor == nil { + return true + } + + indicator := "*" + if !emitter.anchor_data.alias { + indicator = "&" + } + if !yaml_emitter_write_indicator(emitter, []byte(indicator), true, false, false) { + return false + } + + return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) +} + +/* + * Write a tag. + */ + +func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { + if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { + return true + } + + if len(emitter.tag_data.handle) > 0 { + if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { + return false + } + + if len(emitter.tag_data.suffix) > 0 { + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + + } + } else { + if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { + return false + } + + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + + if !yaml_emitter_write_indicator(emitter, []byte(">"), false, false, false) { + return false + } + + } + + return true +} + +/* + * Write a scalar. 
+ */ + +func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { + switch emitter.scalar_data.style { + case yaml_PLAIN_SCALAR_STYLE: + return yaml_emitter_write_plain_scalar(emitter, + emitter.scalar_data.value, + !emitter.simple_key_context) + + case yaml_SINGLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_single_quoted_scalar(emitter, + emitter.scalar_data.value, + !emitter.simple_key_context) + + case yaml_DOUBLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_double_quoted_scalar(emitter, + emitter.scalar_data.value, + !emitter.simple_key_context) + + case yaml_LITERAL_SCALAR_STYLE: + return yaml_emitter_write_literal_scalar(emitter, + emitter.scalar_data.value) + + case yaml_FOLDED_SCALAR_STYLE: + return yaml_emitter_write_folded_scalar(emitter, + emitter.scalar_data.value) + + default: + panic("unknown scalar") + } + + return false +} + +/* + * Check if a %YAML directive is valid. + */ + +func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, + version_directive yaml_version_directive_t) bool { + if version_directive.major != 1 || version_directive.minor != 1 { + return yaml_emitter_set_emitter_error(emitter, + "incompatible %YAML directive") + } + + return true +} + +/* + * Check if a %TAG directive is valid. + */ + +func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, + tag_directive *yaml_tag_directive_t) bool { + handle := tag_directive.handle + prefix := tag_directive.prefix + + if len(handle) == 0 { + return yaml_emitter_set_emitter_error(emitter, + "tag handle must not be empty") + } + + if handle[0] != '!' { + return yaml_emitter_set_emitter_error(emitter, + "tag handle must start with '!'") + } + + if handle[len(handle)-1] != '!' 
{
+		return yaml_emitter_set_emitter_error(emitter,
+			"tag handle must end with '!'")
+	}
+
+	for i := 1; i < len(handle)-1; i += width(handle[i]) {
+		if !is_alpha(handle[i]) {
+			return yaml_emitter_set_emitter_error(emitter,
+				"tag handle must contain alphanumerical characters only")
+		}
+	}
+
+	if len(prefix) == 0 {
+		return yaml_emitter_set_emitter_error(emitter,
+			"tag prefix must not be empty")
+	}
+
+	return true
+}
+
+/*
+ * Check if an anchor is valid.
+ */
+
+func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t,
+	anchor []byte, alias bool) bool {
+	if len(anchor) == 0 {
+		errmsg := "alias value must not be empty"
+		if !alias {
+			errmsg = "anchor value must not be empty"
+		}
+		return yaml_emitter_set_emitter_error(emitter, errmsg)
+	}
+
+	for i := 0; i < len(anchor); i += width(anchor[i]) {
+		if !is_alpha(anchor[i]) {
+			errmsg := "alias value must contain alphanumerical characters only"
+			if !alias {
+				errmsg = "anchor value must contain alphanumerical characters only"
+			}
+			return yaml_emitter_set_emitter_error(emitter, errmsg)
+		}
+	}
+
+	emitter.anchor_data.anchor = anchor
+	emitter.anchor_data.alias = alias
+
+	return true
+}
+
+/*
+ * Check if a tag is valid.
+ */
+
+func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool {
+	if len(tag) == 0 {
+		return yaml_emitter_set_emitter_error(emitter,
+			"tag value must not be empty")
+	}
+
+	for i := range emitter.tag_directives {
+		tag_directive := &emitter.tag_directives[i]
+		if bytes.HasPrefix(tag, tag_directive.prefix) {
+			emitter.tag_data.handle = tag_directive.handle
+			emitter.tag_data.suffix = tag[len(tag_directive.prefix):]
+			return true
+		}
+	}
+
+	emitter.tag_data.suffix = tag
+
+	return true
+}
+
+/*
+ * Check if a scalar is valid.
+ */ + +func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { + block_indicators := false + flow_indicators := false + line_breaks := false + special_characters := false + + leading_space := false + leading_break := false + trailing_space := false + trailing_break := false + break_space := false + space_break := false + + preceeded_by_whitespace := false + followed_by_whitespace := false + previous_space := false + previous_break := false + + emitter.scalar_data.value = value + + if len(value) == 0 { + emitter.scalar_data.multiline = false + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = false + + return true + } + + if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || + (value[0] == '.' && value[1] == '.' && value[2] == '.')) { + block_indicators = true + flow_indicators = true + } + + preceeded_by_whitespace = true + + for i, w := 0, 0; i < len(value); i += w { + w = width(value[i]) + followed_by_whitespace = i+w >= len(value) || is_blankz_at(value, w) + + if i == 0 { + switch value[i] { + case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': + flow_indicators = true + block_indicators = true + case '?', ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '-': + if followed_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } else { + switch value[i] { + case ',', '?', '[', ']', '{', '}': + flow_indicators = true + case ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '#': + if preceeded_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } + + if !is_printable_at(value, i) || (!is_ascii(value[i]) && !emitter.unicode) { + special_characters = true + } + + if is_break_at(value, i) { + line_breaks = true 
+ } + + if is_space(value[i]) { + if i == 0 { + leading_space = true + } + if i+w == len(value) { + trailing_space = true + } + if previous_break { + break_space = true + } + previous_space = true + previous_break = false + } else if is_break_at(value, i) { + if i == 0 { + leading_break = true + } + if i+width(value[i]) == len(value) { + trailing_break = true + } + if previous_space { + space_break = true + } + previous_space = false + previous_break = true + } else { + previous_space = false + previous_break = false + } + + preceeded_by_whitespace = is_blankz_at(value, i) + } + + emitter.scalar_data.multiline = line_breaks + + emitter.scalar_data.flow_plain_allowed = true + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = true + + if leading_space || leading_break || trailing_space || trailing_break { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + + if trailing_space { + emitter.scalar_data.block_allowed = false + } + + if break_space { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + } + + if space_break || special_characters { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + emitter.scalar_data.block_allowed = false + } + + if line_breaks { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + + if flow_indicators { + emitter.scalar_data.flow_plain_allowed = false + } + + if block_indicators { + emitter.scalar_data.block_plain_allowed = false + } + + return true +} + +/* + * Check if the event data is valid. 
+ */ + +func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.anchor_data.anchor = nil + emitter.tag_data.handle = nil + emitter.tag_data.suffix = nil + emitter.scalar_data.value = nil + + switch event.event_type { + case yaml_ALIAS_EVENT: + if !yaml_emitter_analyze_anchor(emitter, + event.anchor, true) { + return false + } + + case yaml_SCALAR_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, + event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || + (!event.implicit && + !event.quoted_implicit)) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + if !yaml_emitter_analyze_scalar(emitter, event.value) { + return false + } + case yaml_SEQUENCE_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, + event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || + !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, + event.tag) { + return false + } + } + case yaml_MAPPING_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, + event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || + !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, + event.tag) { + return false + } + } + + } + return true +} + +/* + * Write the BOM character. 
+ */ + +func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { + if !flush(emitter) { + return false + } + + pos := emitter.buffer_pos + emitter.buffer[pos] = '\xEF' + emitter.buffer[pos+1] = '\xBB' + emitter.buffer[pos+2] = '\xBF' + emitter.buffer_pos += 3 + return true +} + +func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { + indent := emitter.indent + if indent < 0 { + indent = 0 + } + + if !emitter.indention || emitter.column > indent || + (emitter.column == indent && !emitter.whitespace) { + if !put_break(emitter) { + return false + } + } + + for emitter.column < indent { + if !put(emitter, ' ') { + return false + } + } + + emitter.whitespace = true + emitter.indention = true + + return true +} + +func yaml_emitter_write_indicator(emitter *yaml_emitter_t, + indicator []byte, need_whitespace bool, + is_whitespace bool, is_indention bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + ind_pos := 0 + for ind_pos < len(indicator) { + if !write(emitter, indicator, &ind_pos) { + return false + } + } + + emitter.whitespace = is_whitespace + emitter.indention = (emitter.indention && is_indention) + emitter.open_ended = false + + return true +} + +func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { + pos := 0 + for pos < len(value) { + if !write(emitter, value, &pos) { + return false + } + } + + emitter.whitespace = false + emitter.indention = false + + return true +} + +func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + pos := 0 + for pos < len(value) { + if !write(emitter, value, &pos) { + return false + } + } + + emitter.whitespace = false + emitter.indention = false + + return true +} + +func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, + need_whitespace bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' 
') { + return false + } + } + + for i := 0; i < len(value); { + write_it := false + switch value[i] { + case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', + '.', '!', '~', '*', '\'', '(', ')', '[', ']': + write_it = true + default: + write_it = is_alpha(value[i]) + } + if write_it { + if !write(emitter, value, &i) { + return false + } + } else { + w := width(value[i]) + for j := 0; j < w; j++ { + val := value[i] + i++ + + if !put(emitter, '%') { + return false + } + c := val >> 4 + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + + c = val & 0x0f + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + + } + } + } + + emitter.whitespace = false + emitter.indention = false + + return true +} + +func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, + allow_breaks bool) bool { + spaces := false + breaks := false + + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + for i := 0; i < len(value); { + if is_space(value[i]) { + if allow_breaks && !spaces && + emitter.column > emitter.best_width && + !is_space(value[i+1]) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break_at(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + + emitter.whitespace = false + emitter.indention = false + if emitter.root_context { + emitter.open_ended = true + } + + return true +} + +func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, 
value []byte, + allow_breaks bool) bool { + spaces := false + breaks := false + + if !yaml_emitter_write_indicator(emitter, []byte("'"), true, false, false) { + return false + } + + for i := 0; i < len(value); { + if is_space(value[i]) { + if allow_breaks && !spaces && + emitter.column > emitter.best_width && + i > 0 && i < len(value)-1 && + !is_space(value[i+1]) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break_at(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if value[i] == '\'' { + if !put(emitter, '\'') { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + + if !yaml_emitter_write_indicator(emitter, []byte("'"), false, false, false) { + return false + } + + emitter.whitespace = false + emitter.indention = false + + return true +} + +func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, + allow_breaks bool) bool { + + spaces := false + + if !yaml_emitter_write_indicator(emitter, []byte("\""), true, false, false) { + return false + } + + for i := 0; i < len(value); { + if !is_printable_at(value, i) || (!emitter.unicode && !is_ascii(value[i])) || + is_bom_at(value, i) || is_break_at(value, i) || + value[i] == '"' || value[i] == '\\' { + octet := value[i] + + var w int + var v rune + switch { + case octet&0x80 == 0x00: + w, v = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, v = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, v = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, v = 4, rune(octet&0x07) + } + + for k := 1; k < w; k++ { + octet = 
value[i+k] + v = (v << 6) + (rune(octet) & 0x3F) + } + i += w + + if !put(emitter, '\\') { + return false + } + + switch v { + case 0x00: + if !put(emitter, '0') { + return false + } + case 0x07: + if !put(emitter, 'a') { + return false + } + case 0x08: + if !put(emitter, 'b') { + return false + } + case 0x09: + if !put(emitter, 't') { + return false + } + + case 0x0A: + if !put(emitter, 'n') { + return false + } + + case 0x0B: + if !put(emitter, 'v') { + return false + } + + case 0x0C: + if !put(emitter, 'f') { + return false + } + + case 0x0D: + if !put(emitter, 'r') { + return false + } + + case 0x1B: + if !put(emitter, 'e') { + return false + } + case 0x22: + if !put(emitter, '"') { + return false + } + case 0x5C: + if !put(emitter, '\\') { + return false + } + case 0x85: + if !put(emitter, 'N') { + return false + } + + case 0xA0: + if !put(emitter, '_') { + return false + } + + case 0x2028: + if !put(emitter, 'L') { + return false + } + + case 0x2029: + if !put(emitter, 'P') { + return false + } + default: + if v <= 0xFF { + if !put(emitter, 'x') { + return false + } + w = 2 + } else if v <= 0xFFFF { + if !put(emitter, 'u') { + return false + } + w = 4 + } else { + if !put(emitter, 'U') { + return false + } + w = 8 + } + for k := (w - 1) * 4; k >= 0; k -= 4 { + digit := byte((v >> uint(k)) & 0x0F) + c := digit + '0' + if c > 9 { + c = digit + 'A' - 10 + } + if !put(emitter, c) { + return false + } + } + } + spaces = false + } else if is_space(value[i]) { + if allow_breaks && !spaces && + emitter.column > emitter.best_width && + i > 0 && i < len(value)-1 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if is_space(value[i+1]) { + if !put(emitter, '\\') { + return false + } + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else { + if !write(emitter, value, &i) { + return false + } + spaces = false + } + } + + if !yaml_emitter_write_indicator(emitter, []byte("\""), false, false, 
false) { + return false + } + + emitter.whitespace = false + emitter.indention = false + + return true +} + +func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { + + if is_space(value[0]) || is_break_at(value, 0) { + indent_hint := []byte{'0' + byte(emitter.best_indent)} + if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { + return false + } + } + + emitter.open_ended = false + + var chomp_hint [1]byte + if len(value) == 0 { + chomp_hint[0] = '-' + } else { + i := len(value) - 1 + for value[i]&0xC0 == 0x80 { + i-- + } + + if !is_break_at(value, i) { + chomp_hint[0] = '-' + } else if i == 0 { + chomp_hint[0] = '+' + emitter.open_ended = true + } else { + for value[i]&0xC0 == 0x80 { + i-- + } + + if is_break_at(value, i) { + chomp_hint[0] = '+' + emitter.open_ended = true + } + } + } + + if chomp_hint[0] != 0 { + if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { + return false + } + } + + return true +} + +func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { + + breaks := true + + if !yaml_emitter_write_indicator(emitter, []byte("|"), true, false, false) { + return false + } + + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + + if !put_break(emitter) { + return false + } + + emitter.indention = true + emitter.whitespace = true + + for i := 0; i < len(value); { + if is_break_at(value, i) { + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + + return true +} + +func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { + breaks := true + leading_spaces := true + + if !yaml_emitter_write_indicator(emitter, []byte(">"), true, false, false) { + 
return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + if !put_break(emitter) { + return false + } + emitter.indention = true + emitter.whitespace = true + + for i := 0; i < len(value); { + if is_break_at(value, i) { + if !breaks && !leading_spaces && value[i] == '\n' { + k := i + for is_break_at(value, k) { + k += width(value[k]) + } + if !is_blankz_at(value, k) { + if !put_break(emitter) { + return false + } + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + leading_spaces = is_blank(value[i]) + } + if !breaks && is_space(value[i]) && !is_space(value[i+1]) && + emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + emitter.indention = false + breaks = false + } + } + + return true +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode.go new file mode 100644 index 0000000..fd99180 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode.go @@ -0,0 +1,395 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package candiedyaml + +import ( + "bytes" + "encoding/base64" + "io" + "math" + "reflect" + "regexp" + "sort" + "strconv" + "time" +) + +var ( + timeTimeType = reflect.TypeOf(time.Time{}) + marshalerType = reflect.TypeOf(new(Marshaler)).Elem() + numberType = reflect.TypeOf(Number("")) + nonPrintable = regexp.MustCompile("[^\t\n\r\u0020-\u007E\u0085\u00A0-\uD7FF\uE000-\uFFFD]") + multiline = regexp.MustCompile("\n|\u0085|\u2028|\u2029") + + shortTags = map[string]string{ + yaml_NULL_TAG: "!!null", + yaml_BOOL_TAG: "!!bool", + yaml_STR_TAG: "!!str", + yaml_INT_TAG: "!!int", + yaml_FLOAT_TAG: "!!float", + yaml_TIMESTAMP_TAG: "!!timestamp", + yaml_SEQ_TAG: "!!seq", + yaml_MAP_TAG: "!!map", + yaml_BINARY_TAG: "!!binary", + } +) + +type Marshaler interface { + MarshalYAML() (tag string, value interface{}, err error) +} + +// An Encoder writes JSON objects to an output stream. +type Encoder struct { + w io.Writer + emitter yaml_emitter_t + event yaml_event_t + flow bool + err error +} + +func Marshal(v interface{}) ([]byte, error) { + b := bytes.Buffer{} + e := NewEncoder(&b) + err := e.Encode(v) + return b.Bytes(), err +} + +// NewEncoder returns a new encoder that writes to w. 
+func NewEncoder(w io.Writer) *Encoder { + e := &Encoder{w: w} + yaml_emitter_initialize(&e.emitter) + yaml_emitter_set_output_writer(&e.emitter, e.w) + yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) + e.emit() + yaml_document_start_event_initialize(&e.event, nil, nil, true) + e.emit() + + return e +} + +func (e *Encoder) Encode(v interface{}) (err error) { + defer recovery(&err) + + if e.err != nil { + return e.err + } + + e.marshal("", reflect.ValueOf(v), true) + + yaml_document_end_event_initialize(&e.event, true) + e.emit() + e.emitter.open_ended = false + yaml_stream_end_event_initialize(&e.event) + e.emit() + + return nil +} + +func (e *Encoder) emit() { + if !yaml_emitter_emit(&e.emitter, &e.event) { + panic("bad emit") + } +} + +func (e *Encoder) marshal(tag string, v reflect.Value, allowAddr bool) { + vt := v.Type() + + if vt.Implements(marshalerType) { + e.emitMarshaler(tag, v) + return + } + + if vt.Kind() != reflect.Ptr && allowAddr { + if reflect.PtrTo(vt).Implements(marshalerType) { + e.emitAddrMarshaler(tag, v) + return + } + } + + switch v.Kind() { + case reflect.Interface: + if v.IsNil() { + e.emitNil() + } else { + e.marshal(tag, v.Elem(), allowAddr) + } + case reflect.Map: + e.emitMap(tag, v) + case reflect.Ptr: + if v.IsNil() { + e.emitNil() + } else { + e.marshal(tag, v.Elem(), true) + } + case reflect.Struct: + e.emitStruct(tag, v) + case reflect.Slice: + e.emitSlice(tag, v) + case reflect.String: + e.emitString(tag, v) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + e.emitInt(tag, v) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + e.emitUint(tag, v) + case reflect.Float32, reflect.Float64: + e.emitFloat(tag, v) + case reflect.Bool: + e.emitBool(tag, v) + default: + panic("Can't marshal type yet: " + v.Type().String()) + } +} + +func (e *Encoder) emitMap(tag string, v reflect.Value) { + e.mapping(tag, func() { + var keys stringValues = 
v.MapKeys() + sort.Sort(keys) + for _, k := range keys { + e.marshal("", k, true) + e.marshal("", v.MapIndex(k), true) + } + }) +} + +func (e *Encoder) emitStruct(tag string, v reflect.Value) { + if v.Type() == timeTimeType { + e.emitTime(tag, v) + return + } + + fields := cachedTypeFields(v.Type()) + + e.mapping(tag, func() { + for _, f := range fields { + fv := fieldByIndex(v, f.index) + if !fv.IsValid() || f.omitEmpty && isEmptyValue(fv) { + continue + } + + e.marshal("", reflect.ValueOf(f.name), true) + e.flow = f.flow + e.marshal("", fv, true) + } + }) +} + +func (e *Encoder) emitTime(tag string, v reflect.Value) { + t := v.Interface().(time.Time) + bytes, _ := t.MarshalText() + e.emitScalar(string(bytes), "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} + +func (e *Encoder) mapping(tag string, f func()) { + implicit := tag == "" + style := yaml_BLOCK_MAPPING_STYLE + if e.flow { + e.flow = false + style = yaml_FLOW_MAPPING_STYLE + } + yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style) + e.emit() + + f() + + yaml_mapping_end_event_initialize(&e.event) + e.emit() +} + +func (e *Encoder) emitSlice(tag string, v reflect.Value) { + if v.Type() == byteSliceType { + e.emitBase64(tag, v) + return + } + + implicit := tag == "" + style := yaml_BLOCK_SEQUENCE_STYLE + if e.flow { + e.flow = false + style = yaml_FLOW_SEQUENCE_STYLE + } + yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), 
implicit, style) + e.emit() + + n := v.Len() + for i := 0; i < n; i++ { + e.marshal("", v.Index(i), true) + } + + yaml_sequence_end_event_initialize(&e.event) + e.emit() +} + +func (e *Encoder) emitBase64(tag string, v reflect.Value) { + if v.IsNil() { + e.emitNil() + return + } + + s := v.Bytes() + + dst := make([]byte, base64.StdEncoding.EncodedLen(len(s))) + + base64.StdEncoding.Encode(dst, s) + e.emitScalar(string(dst), "", yaml_BINARY_TAG, yaml_DOUBLE_QUOTED_SCALAR_STYLE) +} + +func (e *Encoder) emitString(tag string, v reflect.Value) { + var style yaml_scalar_style_t + s := v.String() + + if nonPrintable.MatchString(s) { + e.emitBase64(tag, v) + return + } + + if v.Type() == numberType { + style = yaml_PLAIN_SCALAR_STYLE + } else { + event := yaml_event_t{ + implicit: true, + value: []byte(s), + } + + rtag, _ := resolveInterface(event, false) + if tag == "" && rtag != yaml_STR_TAG { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } else if multiline.MatchString(s) { + style = yaml_LITERAL_SCALAR_STYLE + } else { + style = yaml_PLAIN_SCALAR_STYLE + } + } + + e.emitScalar(s, "", tag, style) +} + +func (e *Encoder) emitBool(tag string, v reflect.Value) { + s := strconv.FormatBool(v.Bool()) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *Encoder) emitInt(tag string, v reflect.Value) { + s := strconv.FormatInt(v.Int(), 10) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *Encoder) emitUint(tag string, v reflect.Value) { + s := strconv.FormatUint(v.Uint(), 10) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *Encoder) emitFloat(tag string, v reflect.Value) { + f := v.Float() + + var s string + switch { + case math.IsNaN(f): + s = ".nan" + case math.IsInf(f, 1): + s = "+.inf" + case math.IsInf(f, -1): + s = "-.inf" + default: + s = strconv.FormatFloat(f, 'g', -1, v.Type().Bits()) + } + + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) +} + +func (e *Encoder) emitNil() { + e.emitScalar("null", "", "", 
yaml_PLAIN_SCALAR_STYLE) +} + +func (e *Encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { + implicit := tag == "" + if !implicit { + style = yaml_PLAIN_SCALAR_STYLE + } + + stag := shortTags[tag] + if stag == "" { + stag = tag + } + + yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(stag), []byte(value), implicit, implicit, style) + e.emit() +} + +func (e *Encoder) emitMarshaler(tag string, v reflect.Value) { + if v.Kind() == reflect.Ptr && v.IsNil() { + e.emitNil() + return + } + + m := v.Interface().(Marshaler) + if m == nil { + e.emitNil() + return + } + t, val, err := m.MarshalYAML() + if err != nil { + panic(err) + } + if val == nil { + e.emitNil() + return + } + + e.marshal(t, reflect.ValueOf(val), false) +} + +func (e *Encoder) emitAddrMarshaler(tag string, v reflect.Value) { + if !v.CanAddr() { + e.marshal(tag, v, false) + return + } + + va := v.Addr() + if va.IsNil() { + e.emitNil() + return + } + + m := v.Interface().(Marshaler) + t, val, err := m.MarshalYAML() + if err != nil { + panic(err) + } + + if val == nil { + e.emitNil() + return + } + + e.marshal(t, reflect.ValueOf(val), false) +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode_test.go new file mode 100644 index 0000000..2506a10 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/encode_test.go @@ -0,0 +1,634 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package candiedyaml + +import ( + "bytes" + "errors" + "math" + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("Encode", func() { + var buf *bytes.Buffer + var enc *Encoder + + BeforeEach(func() { + buf = &bytes.Buffer{} + enc = NewEncoder(buf) + }) + + Context("Scalars", func() { + It("handles strings", func() { + err := enc.Encode("abc") + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`abc +`)) + + }) + + It("handles really short strings", func() { + err := enc.Encode(".") + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`. +`)) + + }) + + It("encodes strings with multilines", func() { + err := enc.Encode("a\nc") + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`|- + a + c +`)) + + }) + + It("handles strings that match known scalars", func() { + err := enc.Encode("true") + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`"true" +`)) + + }) + + Context("handles ints", func() { + It("handles ints", func() { + err := enc.Encode(13) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("13\n")) + }) + + It("handles uints", func() { + err := enc.Encode(uint64(1)) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("1\n")) + }) + }) + + Context("handles floats", func() { + It("handles float32", func() { + err := enc.Encode(float32(1.234)) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("1.234\n")) + + }) + + It("handles float64", func() { + err := enc.Encode(float64(1.2e23)) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("1.2e+23\n")) + }) + + It("handles NaN", func() { + err := enc.Encode(math.NaN()) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(".nan\n")) + }) + + It("handles infinity", func() { + err := enc.Encode(math.Inf(-1)) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("-.inf\n")) + }) + }) + + It("handles bools", func() { + 
err := enc.Encode(true) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("true\n")) + }) + + It("handles time.Time", func() { + t := time.Now() + err := enc.Encode(t) + Expect(err).NotTo(HaveOccurred()) + bytes, _ := t.MarshalText() + Expect(buf.String()).To(Equal(string(bytes) + "\n")) + }) + + Context("Null", func() { + It("fails on nil", func() { + err := enc.Encode(nil) + Expect(err).To(HaveOccurred()) + }) + }) + + It("handles []byte", func() { + err := enc.Encode([]byte{'a', 'b', 'c'}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("!!binary YWJj\n")) + }) + + Context("Ptrs", func() { + It("handles ptr of a type", func() { + p := new(int) + *p = 10 + err := enc.Encode(p) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("10\n")) + }) + + It("handles nil ptr", func() { + var p *int + err := enc.Encode(p) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("null\n")) + }) + }) + + Context("Structs", func() { + It("handles simple structs", func() { + type batter struct { + Name string + HR int64 + AVG float64 + } + + batters := []batter{ + batter{Name: "Mark McGwire", HR: 65, AVG: 0.278}, + batter{Name: "Sammy Sosa", HR: 63, AVG: 0.288}, + } + err := enc.Encode(batters) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`- Name: Mark McGwire + HR: 65 + AVG: 0.278 +- Name: Sammy Sosa + HR: 63 + AVG: 0.288 +`)) + + }) + + It("handles tagged structs", func() { + type batter struct { + Name string `yaml:"name"` + HR int64 + AVG float64 `yaml:"avg"` + } + + batters := []batter{ + batter{Name: "Mark McGwire", HR: 65, AVG: 0.278}, + batter{Name: "Sammy Sosa", HR: 63, AVG: 0.288}, + } + err := enc.Encode(batters) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`- name: Mark McGwire + HR: 65 + avg: 0.278 +- name: Sammy Sosa + HR: 63 + avg: 0.288 +`)) + + }) + + It("handles nested structs", func() { + type nestedConfig struct { + AString string `yaml:"str"` + 
Integer int `yaml:"int"` + } + type config struct { + TopString string + Nested nestedConfig + } + + cfg := config{ + TopString: "def", + Nested: nestedConfig{ + AString: "abc", + Integer: 123, + }, + } + + err := enc.Encode(cfg) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`TopString: def +Nested: + str: abc + int: 123 +`)) + + }) + + It("handles inline structs", func() { + type NestedConfig struct { + AString string `yaml:"str"` + Integer int `yaml:"int"` + } + type config struct { + TopString string + NestedConfig + } + + cfg := config{ + TopString: "def", + NestedConfig: NestedConfig{ + AString: "abc", + Integer: 123, + }, + } + + err := enc.Encode(cfg) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`TopString: def +str: abc +int: 123 +`)) + + }) + + It("handles inline structs with conflicts", func() { + type NestedConfig struct { + AString string `yaml:"str"` + Integer int `yaml:"int"` + } + type config struct { + AString string `yaml:"str"` + NestedConfig + } + + cfg := config{ + AString: "def", + NestedConfig: NestedConfig{ + AString: "abc", + Integer: 123, + }, + } + + err := enc.Encode(cfg) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`str: def +int: 123 +`)) + + }) + + }) + + }) + + Context("Sequence", func() { + It("handles slices", func() { + val := []string{"a", "b", "c"} + err := enc.Encode(val) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`- a +- b +- c +`)) + + }) + }) + + Context("Maps", func() { + It("Encodes simple maps", func() { + err := enc.Encode(&map[string]string{ + "name": "Mark McGwire", + "hr": "65", + "avg": "0.278", + }) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`avg: "0.278" +hr: "65" +name: Mark McGwire +`)) + }) + + It("sorts by key when strings otherwise by kind", func() { + err := enc.Encode(&map[interface{}]string{ + 1.2: "float", + 8: "integer", + "name": "Mark McGwire", + "hr": "65", + "avg": "0.278", + 
}) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`8: integer +1.2: float +avg: "0.278" +hr: "65" +name: Mark McGwire +`)) + }) + + It("encodes mix types", func() { + err := enc.Encode(&map[string]interface{}{ + "name": "Mark McGwire", + "hr": 65, + "avg": 0.278, + }) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`avg: 0.278 +hr: 65 +name: Mark McGwire +`)) + }) + }) + + Context("Sequence of Maps", func() { + It("encodes", func() { + err := enc.Encode([]map[string]interface{}{ + {"name": "Mark McGwire", + "hr": 65, + "avg": 0.278, + }, + {"name": "Sammy Sosa", + "hr": 63, + "avg": 0.288, + }, + }) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`- avg: 0.278 + hr: 65 + name: Mark McGwire +- avg: 0.288 + hr: 63 + name: Sammy Sosa +`)) + + }) + }) + + Context("Maps of Sequence", func() { + It("encodes", func() { + err := enc.Encode(map[string][]interface{}{ + "name": []interface{}{"Mark McGwire", "Sammy Sosa"}, + "hr": []interface{}{65, 63}, + "avg": []interface{}{0.278, 0.288}, + }) + Expect(err).NotTo(HaveOccurred()) + + Expect(buf.String()).To(Equal(`avg: +- 0.278 +- 0.288 +hr: +- 65 +- 63 +name: +- Mark McGwire +- Sammy Sosa +`)) + + }) + }) + + Context("Flow", func() { + It("flows structs", func() { + type i struct { + A string + } + type o struct { + I i `yaml:"i,flow"` + } + + err := enc.Encode(o{ + I: i{A: "abc"}, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`i: {A: abc} +`)) + + }) + + It("flows sequences", func() { + type i struct { + A string + } + type o struct { + I []i `yaml:"i,flow"` + } + + err := enc.Encode(o{ + I: []i{{A: "abc"}}, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`i: [{A: abc}] +`)) + + }) + }) + + Context("Omit empty", func() { + It("omits nil ptrs", func() { + type i struct { + A *string `yaml:"a,omitempty"` + } + type o struct { + I []i `yaml:"i,flow"` + } + + err := enc.Encode(o{ + I: []i{{A: nil}}, + }) + 
Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`i: [{}] +`)) + + }) + + }) + + Context("Skip field", func() { + It("does not include the field", func() { + type a struct { + B string `yaml:"-"` + C string + } + + err := enc.Encode(a{B: "b", C: "c"}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`C: c +`)) + + }) + }) + + Context("Marshaler support", func() { + Context("Receiver is a value", func() { + It("uses the Marshaler interface when a value", func() { + err := enc.Encode(hasMarshaler{Value: 123}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("123\n")) + }) + + It("uses the Marshaler interface when a pointer", func() { + err := enc.Encode(&hasMarshaler{Value: "abc"}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`abc +`)) + }) + + Context("when it fails", func() { + It("returns an error", func() { + err := enc.Encode(&hasMarshaler{Value: "abc", Error: errors.New("fail")}) + Expect(err).To(MatchError("fail")) + }) + }) + }) + + Context("Receiver is a pointer", func() { + It("uses the Marshaler interface when a pointer", func() { + err := enc.Encode(&hasPtrMarshaler{Value: map[string]string{"a": "b"}}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`a: b +`)) + + }) + + It("skips the Marshaler when its a value", func() { + err := enc.Encode(hasPtrMarshaler{Value: map[string]string{"a": "b"}}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`Tag: "" +Value: + a: b +Error: null +`)) + + }) + + Context("the receiver is nil", func() { + var ptr *hasPtrMarshaler + + Context("when it fails", func() { + It("returns an error", func() { + err := enc.Encode(&hasPtrMarshaler{Value: "abc", Error: errors.New("fail")}) + Expect(err).To(MatchError("fail")) + }) + }) + + It("returns a null", func() { + err := enc.Encode(ptr) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`null +`)) + + }) + + It("returns a null value for ptr types", 
func() { + err := enc.Encode(map[string]*hasPtrMarshaler{"a": ptr}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`a: null +`)) + + }) + + It("panics when used as a nil interface", func() { + Expect(func() { enc.Encode(map[string]Marshaler{"a": ptr}) }).To(Panic()) + }) + }) + + Context("the receiver has a nil value", func() { + ptr := &hasPtrMarshaler{Value: nil} + + It("returns null", func() { + err := enc.Encode(ptr) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`null +`)) + + }) + + Context("in a map", func() { + It("returns a null value for ptr types", func() { + err := enc.Encode(map[string]*hasPtrMarshaler{"a": ptr}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`a: null +`)) + + }) + + It("returns a null value for interface types", func() { + err := enc.Encode(map[string]Marshaler{"a": ptr}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`a: null +`)) + + }) + }) + + Context("in a slice", func() { + It("returns a null value for ptr types", func() { + err := enc.Encode([]*hasPtrMarshaler{ptr}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`- null +`)) + + }) + + It("returns a null value for interface types", func() { + err := enc.Encode([]Marshaler{ptr}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(`- null +`)) + + }) + }) + }) + }) + }) + + Context("Number type", func() { + It("encodes as a number", func() { + n := Number("12345") + err := enc.Encode(n) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("12345\n")) + }) + }) +}) + +type hasMarshaler struct { + Value interface{} + Error error +} + +func (m hasMarshaler) MarshalYAML() (string, interface{}, error) { + return "", m.Value, m.Error +} + +func (m hasMarshaler) UnmarshalYAML(tag string, value interface{}) error { + m.Value = value + return nil +} + +type hasPtrMarshaler struct { + Tag string + Value interface{} + Error error +} + +func (m 
*hasPtrMarshaler) MarshalYAML() (string, interface{}, error) { + return "", m.Value, m.Error +} + +func (m *hasPtrMarshaler) UnmarshalYAML(tag string, value interface{}) error { + m.Tag = tag + m.Value = value + return nil +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_1.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_1.yaml new file mode 100644 index 0000000..d12e671 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_1.yaml @@ -0,0 +1,3 @@ +- Mark McGwire +- Sammy Sosa +- Ken Griffey diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_10.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_10.yaml new file mode 100644 index 0000000..61808f6 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_10.yaml @@ -0,0 +1,8 @@ +--- +hr: + - Mark McGwire + # Following node labeled SS + - &SS Sammy Sosa +rbi: + - *SS # Subsequent occurrence + - Ken Griffey diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_11.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_11.yaml new file mode 100644 index 0000000..9123ce2 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_11.yaml @@ -0,0 +1,9 @@ +? - Detroit Tigers + - Chicago cubs +: + - 2001-07-23 + +? 
[ New York Yankees, + Atlanta Braves ] +: [ 2001-07-02, 2001-08-12, + 2001-08-14 ] diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_12.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_12.yaml new file mode 100644 index 0000000..1fc33f9 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_12.yaml @@ -0,0 +1,8 @@ +--- +# products purchased +- item : Super Hoop + quantity: 1 +- item : Basketball + quantity: 4 +- item : Big Shoes + quantity: 1 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_13.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_13.yaml new file mode 100644 index 0000000..13fb656 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_13.yaml @@ -0,0 +1,4 @@ +# ASCII Art +--- | + \//||\/|| + // || ||__ diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_14.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_14.yaml new file mode 100644 index 0000000..59943de --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_14.yaml @@ -0,0 +1,4 @@ +--- + Mark McGwire's + year was crippled + by a knee injury. diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15.yaml new file mode 100644 index 0000000..80b89a6 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15.yaml @@ -0,0 +1,8 @@ +> + Sammy Sosa completed another + fine season with great stats. + + 63 Home Runs + 0.288 Batting Average + + What a year! 
diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15_dumped.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15_dumped.yaml new file mode 100644 index 0000000..cc2d963 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_15_dumped.yaml @@ -0,0 +1,7 @@ +> + Sammy Sosa completed another fine season with great stats. + + 63 Home Runs + 0.288 Batting Average + + What a year! \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_16.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_16.yaml new file mode 100644 index 0000000..9f66d88 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_16.yaml @@ -0,0 +1,7 @@ +name: Mark McGwire +accomplishment: > + Mark set a major league + home run record in 1998. +stats: | + 65 Home Runs + 0.278 Batting Average diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17.yaml new file mode 100644 index 0000000..3e899c0 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17.yaml @@ -0,0 +1,7 @@ +unicode: "Sosa did fine.\u263A" +control: "\b1998\t1999\t2000\n" +hexesc: "\x0D\x0A is \r\n" + +single: '"Howdy!" he cried.' +quoted: ' # not a ''comment''.' 
+tie-fighter: '|\-*-/|' diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_control.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_control.yaml new file mode 100644 index 0000000..59398a6 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_control.yaml @@ -0,0 +1,2 @@ +control: "\b1998\t1999\t2000\n" + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_hexesc.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_hexesc.yaml new file mode 100644 index 0000000..7ddff26 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_hexesc.yaml @@ -0,0 +1,2 @@ +hexesc: "\x0D\x0A is \r\n" + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_quoted.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_quoted.yaml new file mode 100644 index 0000000..bedc4a5 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_quoted.yaml @@ -0,0 +1,2 @@ +quoted: ' # not a ''comment''.' + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_single.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_single.yaml new file mode 100644 index 0000000..c3fe6aa --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_single.yaml @@ -0,0 +1 @@ +single: '"Howdy!" he cried.' 
diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_tie_fighter.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_tie_fighter.yaml new file mode 100644 index 0000000..9d82173 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_tie_fighter.yaml @@ -0,0 +1 @@ +tie-fighter: '|\-*-/|' diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_unicode.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_unicode.yaml new file mode 100644 index 0000000..2b378bd --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_17_unicode.yaml @@ -0,0 +1,2 @@ +unicode: "Sosa did fine.\u263A" + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_18.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_18.yaml new file mode 100644 index 0000000..e0a8bfa --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_18.yaml @@ -0,0 +1,6 @@ +plain: + This unquoted scalar + spans many lines. 
+ +quoted: "So does this + quoted scalar.\n" diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_19.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_19.yaml new file mode 100644 index 0000000..524f21d --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_19.yaml @@ -0,0 +1,4 @@ +canonical: 12345 +decimal: +12_345 +octal: 014 +hexadecimal: 0xC diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_2.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_2.yaml new file mode 100644 index 0000000..7b7ec94 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_2.yaml @@ -0,0 +1,3 @@ +hr: 65 # Home runs +avg: 0.278 # Batting average +rbi: 147 # Runs Batted In diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_20.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_20.yaml new file mode 100644 index 0000000..727b261 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_20.yaml @@ -0,0 +1,5 @@ +canonical: 1.23015e+3 +exponential: 12.3015e+02 +fixed: 1_230.15 +negative infinity: -.inf +not a number: .NaN diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_21.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_21.yaml new file mode 100644 index 0000000..c065b2a --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_21.yaml @@ -0,0 +1,4 @@ +null: ~ +true: yes +false: no +string: '12345' diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_22.yaml 
b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_22.yaml new file mode 100644 index 0000000..aaac185 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_22.yaml @@ -0,0 +1,4 @@ +canonical: 2001-12-15T02:59:43.1Z +iso8601: 2001-12-14t21:59:43.10-05:00 +spaced: 2001-12-14 21:59:43.10 -5 +date: 2002-12-14 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23.yaml new file mode 100644 index 0000000..adbe4e6 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23.yaml @@ -0,0 +1,14 @@ +--- +not-date: !!str 2002-04-28 + +picture: !!binary "\ + R0lGODlhDAAMAIQAAP//9/X\ + 17unp5WZmZgAAAOfn515eXv\ + Pz7Y6OjuDg4J+fn5OTk6enp\ + 56enmleECcgggoBADs=" + +application specific tag: !something | + The semantics of the tag + above may be different for + different documents. + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_application.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_application.yaml new file mode 100644 index 0000000..03cc760 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_application.yaml @@ -0,0 +1,5 @@ +--- +application specific tag: !something | + The semantics of the tag + above may be different for + different documents. 
diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_non_date.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_non_date.yaml new file mode 100644 index 0000000..2e95415 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_non_date.yaml @@ -0,0 +1,3 @@ +--- +not-date: !!str 2002-04-28 + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_picture.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_picture.yaml new file mode 100644 index 0000000..b87063e --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_23_picture.yaml @@ -0,0 +1,9 @@ +--- +picture: !!binary "\ + R0lGODlhDAAMAIQAAP//9/X\ + 17unp5WZmZgAAAOfn515eXv\ + Pz7Y6OjuDg4J+fn5OTk6enp\ + 56enmleECcgggoBADs=" + + + \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24.yaml new file mode 100644 index 0000000..1180757 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24.yaml @@ -0,0 +1,14 @@ +%TAG ! tag:clarkevans.com,2002: +--- !shape + # Use the ! handle for presenting + # tag:clarkevans.com,2002:circle +- !circle + center: &ORIGIN {x: 73, y: 129} + radius: 7 +- !line + start: *ORIGIN + finish: { x: 89, y: 102 } +- !label + start: *ORIGIN + color: 0xFFEEBB + text: Pretty vector drawing. 
diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24_dumped.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24_dumped.yaml new file mode 100644 index 0000000..1742cd2 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_24_dumped.yaml @@ -0,0 +1,11 @@ +!shape +- !circle + center: &id001 {x: 73, y: 129} + radius: 7 +- !line + finish: {x: 89, y: 102} + start: *id001 +- !label + color: 0xFFEEBB + start: *id001 + text: Pretty vector drawing. \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_25.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_25.yaml new file mode 100644 index 0000000..769ac31 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_25.yaml @@ -0,0 +1,7 @@ +# sets are represented as a +# mapping where each key is +# associated with the empty string +--- !!set +? Mark McGwire +? Sammy Sosa +? 
Ken Griff diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_26.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_26.yaml new file mode 100644 index 0000000..3143763 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_26.yaml @@ -0,0 +1,7 @@ +# ordered maps are represented as +# a sequence of mappings, with +# each mapping having one key +--- !!omap +- Mark McGwire: 65 +- Sammy Sosa: 63 +- Ken Griffy: 58 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27.yaml new file mode 100644 index 0000000..395e79c --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27.yaml @@ -0,0 +1,29 @@ +--- ! +invoice: 34843 +date : 2001-01-23 +billTo: &id001 + given : Chris + family : Dumars + address: + lines: | + 458 Walkman Dr. + Suite #292 + city : Royal Oak + state : MI + postal : 48046 +shipTo: *id001 +product: + - sku : BL394D + quantity : 4 + description : Basketball + price : 450.00 + - sku : BL4438H + quantity : 1 + description : Super Hoop + price : 2392.00 +tax : 251.42 +total: 4443.52 +comments: + Late afternoon is best. + Backup contact is Nancy + Billsmer @ 338-4338. diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27_dumped.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27_dumped.yaml new file mode 100644 index 0000000..51a89b8 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_27_dumped.yaml @@ -0,0 +1,20 @@ +!!org.yaml.snakeyaml.Invoice +billTo: &id001 + address: + city: Royal Oak + lines: | + 458 Walkman Dr. 
+ Suite #292 + postal: '48046' + state: MI + family: Dumars + given: Chris +comments: Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338. +date: '2001-01-23' +invoice: 34843 +product: +- {description: Basketball, price: 450.0, quantity: 4, sku: BL394D} +- {description: Super Hoop, price: 2392.0, quantity: 1, sku: BL4438H} +shipTo: *id001 +tax: 251.42 +total: 4443.52 \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_28.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_28.yaml new file mode 100644 index 0000000..eb5fb8a --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_28.yaml @@ -0,0 +1,29 @@ +--- +Time: 2001-11-23 15:01:42 -5 +User: ed +Warning: + This is an error message + for the log file +--- +Time: 2001-11-23 15:02:31 -5 +User: ed +Warning: + A slightly different error + message. +--- +Date: 2001-11-23 15:03:17 -5 +User: ed +Fatal: + Unknown variable "bar" +Stack: + - file: TopClass.py + line: 23 + code: | + x = MoreObject("345\n") + - file: MoreClass.py + line: 58 + code: |- + foo = bar + + + diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_3.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_3.yaml new file mode 100644 index 0000000..2c884b7 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_3.yaml @@ -0,0 +1,8 @@ +american: + - Boston Red Sox + - Detroit Tigers + - New York Yankees +national: + - New York Mets + - Chicago Cubs + - Atlanta Braves \ No newline at end of file diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_4.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_4.yaml new file mode 100644 index 0000000..430f6b3 --- /dev/null +++ 
b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_4.yaml @@ -0,0 +1,8 @@ +- + name: Mark McGwire + hr: 65 + avg: 0.278 +- + name: Sammy Sosa + hr: 63 + avg: 0.288 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_5.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_5.yaml new file mode 100644 index 0000000..cdd7770 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_5.yaml @@ -0,0 +1,3 @@ +- [name , hr, avg ] +- [Mark McGwire, 65, 0.278] +- [Sammy Sosa , 63, 0.288] diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_6.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_6.yaml new file mode 100644 index 0000000..7a957b2 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_6.yaml @@ -0,0 +1,5 @@ +Mark McGwire: {hr: 65, avg: 0.278} +Sammy Sosa: { + hr: 63, + avg: 0.288 + } diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_7.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_7.yaml new file mode 100644 index 0000000..bc711d5 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_7.yaml @@ -0,0 +1,10 @@ +# Ranking of 1998 home runs +--- +- Mark McGwire +- Sammy Sosa +- Ken Griffey + +# Team ranking +--- +- Chicago Cubs +- St Louis Cardinals diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_8.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_8.yaml new file mode 100644 index 0000000..05e102d --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_8.yaml @@ -0,0 +1,10 @@ +--- +time: 
20:03:20 +player: Sammy Sosa +action: strike (miss) +... +--- +time: 20:03:47 +player: Sammy Sosa +action: grand slam +... diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_9.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_9.yaml new file mode 100644 index 0000000..e264180 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example2_9.yaml @@ -0,0 +1,8 @@ +--- +hr: # 1998 hr ranking + - Mark McGwire + - Sammy Sosa +rbi: + # 1998 rbi ranking + - Sammy Sosa + - Ken Griffey diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example_empty.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/example_empty.yaml new file mode 100644 index 0000000..e69de29 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map.yaml new file mode 100644 index 0000000..022446d --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map.yaml @@ -0,0 +1,6 @@ +# Unordered set of key: value pairs. +Block style: !!map + Clark : Evans + Brian : Ingerson + Oren : Ben-Kiki +Flow style: !!map { Clark: Evans, Brian: Ingerson, Oren: Ben-Kiki } diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map_mixed_tags.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map_mixed_tags.yaml new file mode 100644 index 0000000..a5d35b0 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/map_mixed_tags.yaml @@ -0,0 +1,6 @@ +# Unordered set of key: value pairs. +Block style: ! 
+ Clark : Evans + Brian : Ingerson + Oren : Ben-Kiki +Flow style: { Clark: Evans, Brian: Ingerson, Oren: Ben-Kiki } diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/merge.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/merge.yaml new file mode 100644 index 0000000..ee4a48f --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/merge.yaml @@ -0,0 +1,27 @@ +--- +- &CENTER { x: 1, y: 2 } +- &LEFT { x: 0, y: 2 } +- &BIG { r: 10 } +- &SMALL { r: 1 } + +# All the following maps are equal: + +- # Explicit keys + x: 1 + y: 2 + r: 10 + label: center/big + +- # Merge one map + << : *CENTER + r: 10 + label: center/big + +- # Merge multiple maps + << : [ *CENTER, *BIG ] + label: center/big + +- # Override + << : [ *BIG, *LEFT, *SMALL ] + x: 1 + label: center/big diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/omap.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/omap.yaml new file mode 100644 index 0000000..4fa0f45 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/omap.yaml @@ -0,0 +1,8 @@ +# Explicitly typed ordered map (dictionary). +Bestiary: !!omap + - aardvark: African pig-like ant eater. Ugly. + - anteater: South-American ant eater. Two species. + - anaconda: South-American constrictor snake. Scaly. + # Etc. +# Flow style +Numbers: !!omap [ one: 1, two: 2, three : 3 ] diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/pairs.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/pairs.yaml new file mode 100644 index 0000000..05f55b9 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/pairs.yaml @@ -0,0 +1,7 @@ +# Explicitly typed pairs. 
+Block tasks: !!pairs + - meeting: with team. + - meeting: with boss. + - break: lunch. + - meeting: with client. +Flow tasks: !!pairs [ meeting: with team, meeting: with boss ] diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/seq.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/seq.yaml new file mode 100644 index 0000000..5849115 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/seq.yaml @@ -0,0 +1,14 @@ +# Ordered sequence of nodes +Block style: !!seq +- Mercury # Rotates - no light/dark sides. +- Venus # Deadliest. Aptly named. +- Earth # Mostly dirt. +- Mars # Seems empty. +- Jupiter # The king. +- Saturn # Pretty. +- Uranus # Where the sun hardly shines. +- Neptune # Boring. No rings. +- Pluto # You call this a planet? +Flow style: !!seq [ Mercury, Venus, Earth, Mars, # Rocks + Jupiter, Saturn, Uranus, Neptune, # Gas + Pluto ] # Overrated diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/set.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/set.yaml new file mode 100644 index 0000000..e05dc88 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/set.yaml @@ -0,0 +1,7 @@ +# Explicitly typed set. +baseball players: !!set + ? Mark McGwire + ? Sammy Sosa + ? 
Ken Griffey +# Flow style +baseball teams: !!set { Boston Red Sox, Detroit Tigers, New York Yankees } diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/v.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/v.yaml new file mode 100644 index 0000000..81c5d51 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/v.yaml @@ -0,0 +1,4 @@ +--- # New schema +link with: + - = : library1.dll + version: 1.2 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/value.yaml b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/value.yaml new file mode 100644 index 0000000..3eb7919 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/fixtures/specification/types/value.yaml @@ -0,0 +1,10 @@ +--- # Old schema +link with: + - library1.dll + - library2.dll +--- # New schema +link with: + - = : library1.dll + version: 1.2 + - = : library2.dll + version: 2.3 diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/libyaml-LICENSE b/vendor/github.com/cloudfoundry-incubator/candiedyaml/libyaml-LICENSE new file mode 100644 index 0000000..050ced2 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/libyaml-LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2006 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser.go new file mode 100644 index 0000000..8d38e30 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser.go @@ -0,0 +1,1230 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "bytes" +) + +/* + * The parser implements the following grammar: + * + * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END + * implicit_document ::= block_node DOCUMENT-END* + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + * block_node_or_indentless_sequence ::= + * ALIAS + * | properties (block_content | indentless_block_sequence)? + * | block_content + * | indentless_block_sequence + * block_node ::= ALIAS + * | properties block_content? + * | block_content + * flow_node ::= ALIAS + * | properties flow_content? + * | flow_content + * properties ::= TAG ANCHOR? | ANCHOR TAG? 
+ * block_content ::= block_collection | flow_collection | SCALAR + * flow_content ::= flow_collection | SCALAR + * block_collection ::= block_sequence | block_mapping + * flow_collection ::= flow_sequence | flow_mapping + * block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END + * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ + * block_mapping ::= BLOCK-MAPPING_START + * ((KEY block_node_or_indentless_sequence?)? + * (VALUE block_node_or_indentless_sequence?)?)* + * BLOCK-END + * flow_sequence ::= FLOW-SEQUENCE-START + * (flow_sequence_entry FLOW-ENTRY)* + * flow_sequence_entry? + * FLOW-SEQUENCE-END + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * flow_mapping ::= FLOW-MAPPING-START + * (flow_mapping_entry FLOW-ENTRY)* + * flow_mapping_entry? + * FLOW-MAPPING-END + * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + */ + +/* + * Peek the next token in the token queue. + */ +func peek_token(parser *yaml_parser_t) *yaml_token_t { + if parser.token_available || yaml_parser_fetch_more_tokens(parser) { + return &parser.tokens[parser.tokens_head] + } + return nil +} + +/* + * Remove the next token from the queue (must be called after peek_token). + */ +func skip_token(parser *yaml_parser_t) { + parser.token_available = false + parser.tokens_parsed++ + parser.stream_end_produced = parser.tokens[parser.tokens_head].token_type == yaml_STREAM_END_TOKEN + parser.tokens_head++ +} + +/* + * Get the next event. + */ + +func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { + /* Erase the event object. */ + *event = yaml_event_t{} + + /* No events after the end of the stream or error. */ + + if parser.stream_end_produced || parser.error != yaml_NO_ERROR || + parser.state == yaml_PARSE_END_STATE { + return true + } + + /* Generate the next event. */ + + return yaml_parser_state_machine(parser, event) +} + +/* + * Set parser error. 
+ */ + +func yaml_parser_set_parser_error(parser *yaml_parser_t, + problem string, problem_mark YAML_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.problem = problem + parser.problem_mark = problem_mark + + return false +} + +func yaml_parser_set_parser_error_context(parser *yaml_parser_t, + context string, context_mark YAML_mark_t, + problem string, problem_mark YAML_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = problem_mark + + return false +} + +/* + * State dispatcher. + */ + +func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { + switch parser.state { + case yaml_PARSE_STREAM_START_STATE: + return yaml_parser_parse_stream_start(parser, event) + + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, true) + + case yaml_PARSE_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, false) + + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return yaml_parser_parse_document_content(parser, event) + + case yaml_PARSE_DOCUMENT_END_STATE: + return yaml_parser_parse_document_end(parser, event) + + case yaml_PARSE_BLOCK_NODE_STATE: + return yaml_parser_parse_node(parser, event, true, false) + + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return yaml_parser_parse_node(parser, event, true, true) + + case yaml_PARSE_FLOW_NODE_STATE: + return yaml_parser_parse_node(parser, event, false, false) + + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, true) + + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, false) + + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_indentless_sequence_entry(parser, event) + + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return 
yaml_parser_parse_block_mapping_key(parser, event, true) + + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, false) + + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return yaml_parser_parse_block_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, true) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, false) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) + + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, true) + + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, true) + } + + panic("invalid parser state") +} + +/* + * Parse the production: + * stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END + * ************ + */ + +func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type != yaml_STREAM_START_TOKEN { + return yaml_parser_set_parser_error(parser, + "did not find expected ", token.start_mark) + } + + parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE + *event = yaml_event_t{ + event_type: yaml_STREAM_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + encoding: token.encoding, + } + skip_token(parser) + + return true +} + +/* + * Parse the productions: + * implicit_document ::= block_node DOCUMENT-END* + * * + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + * ************************* + */ + +func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, + implicit bool) bool { + + token := peek_token(parser) + if token == nil { + return false + } + + /* Parse extra document end indicators. */ + + if !implicit { + for token.token_type == yaml_DOCUMENT_END_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } + + /* Parse an implicit document. */ + + if implicit && token.token_type != yaml_VERSION_DIRECTIVE_TOKEN && + token.token_type != yaml_TAG_DIRECTIVE_TOKEN && + token.token_type != yaml_DOCUMENT_START_TOKEN && + token.token_type != yaml_STREAM_END_TOKEN { + if !yaml_parser_process_directives(parser, nil, nil) { + return false + } + + parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) + parser.state = yaml_PARSE_BLOCK_NODE_STATE + + *event = yaml_event_t{ + event_type: yaml_DOCUMENT_START_EVENT, + implicit: true, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + } else if token.token_type != yaml_STREAM_END_TOKEN { + /* Parse an explicit document. 
*/ + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + + start_mark := token.start_mark + if !yaml_parser_process_directives(parser, &version_directive, + &tag_directives) { + return false + } + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_DOCUMENT_START_TOKEN { + yaml_parser_set_parser_error(parser, + "did not find expected ", token.start_mark) + return false + } + + parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) + parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE + + end_mark := token.end_mark + + *event = yaml_event_t{ + event_type: yaml_DOCUMENT_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: false, + } + skip_token(parser) + } else { + /* Parse the stream end. */ + parser.state = yaml_PARSE_END_STATE + + *event = yaml_event_t{ + event_type: yaml_STREAM_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + } + return true +} + +/* + * Parse the productions: + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* + * *********** + */ + +func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_VERSION_DIRECTIVE_TOKEN || + token.token_type == yaml_TAG_DIRECTIVE_TOKEN || + token.token_type == yaml_DOCUMENT_START_TOKEN || + token.token_type == yaml_DOCUMENT_END_TOKEN || + token.token_type == yaml_STREAM_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + return yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } else { + return yaml_parser_parse_node(parser, event, true, false) + } +} + +/* + * Parse the productions: + * implicit_document ::= block_node DOCUMENT-END* + * ************* + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + * ************* + */ + +func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { + implicit := true + + token := peek_token(parser) + if token == nil { + return false + } + + start_mark, end_mark := token.start_mark, token.start_mark + + if token.token_type == yaml_DOCUMENT_END_TOKEN { + end_mark = token.end_mark + skip_token(parser) + implicit = false + } + + parser.tag_directives = parser.tag_directives[:0] + + parser.state = yaml_PARSE_DOCUMENT_START_STATE + *event = yaml_event_t{ + event_type: yaml_DOCUMENT_END_EVENT, + start_mark: start_mark, + end_mark: end_mark, + implicit: implicit, + } + + return true +} + +/* + * Parse the productions: + * block_node_or_indentless_sequence ::= + * ALIAS + * ***** + * | properties (block_content | indentless_block_sequence)? + * ********** * + * | block_content | indentless_block_sequence + * * + * block_node ::= ALIAS + * ***** + * | properties block_content? + * ********** * + * | block_content + * * + * flow_node ::= ALIAS + * ***** + * | properties flow_content? 
+ * ********** * + * | flow_content + * * + * properties ::= TAG ANCHOR? | ANCHOR TAG? + * ************************* + * block_content ::= block_collection | flow_collection | SCALAR + * ****** + * flow_content ::= flow_collection | SCALAR + * ****** + */ + +func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, + block bool, indentless_sequence bool) bool { + + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_ALIAS_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + event_type: yaml_ALIAS_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + anchor: token.value, + } + skip_token(parser) + return true + } else { + start_mark, end_mark := token.start_mark, token.start_mark + + var tag_handle []byte + var tag_suffix, anchor []byte + var tag_mark YAML_mark_t + if token.token_type == yaml_ANCHOR_TOKEN { + anchor = token.value + start_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type == yaml_TAG_TOKEN { + tag_handle = token.value + tag_suffix = token.suffix + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } else if token.token_type == yaml_TAG_TOKEN { + tag_handle = token.value + tag_suffix = token.suffix + start_mark, tag_mark = token.start_mark, token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type == yaml_ANCHOR_TOKEN { + anchor = token.value + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + + } + } + + var tag []byte + if tag_handle != nil { + if len(tag_handle) == 0 { + tag = tag_suffix + tag_handle = nil + 
tag_suffix = nil + } else { + for i := range parser.tag_directives { + tag_directive := &parser.tag_directives[i] + if bytes.Equal(tag_directive.handle, tag_handle) { + tag = append([]byte(nil), tag_directive.prefix...) + tag = append(tag, tag_suffix...) + tag_handle = nil + tag_suffix = nil + break + } + } + if len(tag) == 0 { + yaml_parser_set_parser_error_context(parser, + "while parsing a node", start_mark, + "found undefined tag handle", tag_mark) + return false + } + } + } + + implicit := len(tag) == 0 + if indentless_sequence && token.token_type == yaml_BLOCK_ENTRY_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + + return true + } else { + if token.token_type == yaml_SCALAR_TOKEN { + plain_implicit := false + quoted_implicit := false + end_mark = token.end_mark + if (token.style == yaml_PLAIN_SCALAR_STYLE && len(tag) == 0) || + (len(tag) == 1 && tag[0] == '!') { + plain_implicit = true + } else if len(tag) == 0 { + quoted_implicit = true + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + event_type: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + value: token.value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(token.style), + } + + skip_token(parser) + return true + } else if token.token_type == yaml_FLOW_SEQUENCE_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: 
yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), + } + + return true + } else if token.token_type == yaml_FLOW_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE + + *event = yaml_event_t{ + event_type: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + + return true + } else if block && token.token_type == yaml_BLOCK_SEQUENCE_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + + return true + } else if block && token.token_type == yaml_BLOCK_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE + + *event = yaml_event_t{ + event_type: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), + } + return true + } else if len(anchor) > 0 || len(tag) > 0 { + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + event_type: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + quoted_implicit: false, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true + } else { + msg := "while parsing a block node" + if !block { + msg = "while parsing a flow node" + } + yaml_parser_set_parser_error_context(parser, msg, start_mark, + "did not find expected node content", token.start_mark) + return false + } + } + } + + return false +} + +/* + * Parse the productions: + * block_sequence ::= BLOCK-SEQUENCE-START 
(BLOCK-ENTRY block_node?)* BLOCK-END + * ******************** *********** * ********* + */ + +func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, + event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_BLOCK_ENTRY_TOKEN && + token.token_type != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } else { + parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else if token.token_type == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true + } else { + mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + return yaml_parser_set_parser_error_context(parser, + "while parsing a block collection", mark, + "did not find expected '-' indicator", token.start_mark) + } +} + +/* + * Parse the productions: + * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ + * *********** * + */ + +func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, + event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) 
+ if token == nil { + return false + } + if token.token_type != yaml_BLOCK_ENTRY_TOKEN && + token.token_type != yaml_KEY_TOKEN && + token.token_type != yaml_VALUE_TOKEN && + token.token_type != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } else { + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, + } + return true + } +} + +/* + * Parse the productions: + * block_mapping ::= BLOCK-MAPPING_START + * ******************* + * ((KEY block_node_or_indentless_sequence?)? + * *** * + * (VALUE block_node_or_indentless_sequence?)?)* + * + * BLOCK-END + * ********* + */ + +func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, + event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_KEY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_KEY_TOKEN && + token.token_type != yaml_VALUE_TOKEN && + token.token_type != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else if token.token_type == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states 
= parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + event_type: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + return true + } else { + mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + return yaml_parser_set_parser_error_context(parser, + "while parsing a block mapping", mark, + "did not find expected key", token.start_mark) + } +} + +/* + * Parse the productions: + * block_mapping ::= BLOCK-MAPPING_START + * + * ((KEY block_node_or_indentless_sequence?)? + * + * (VALUE block_node_or_indentless_sequence?)?)* + * ***** * + * BLOCK-END + * + */ + +func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, + event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_VALUE_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_KEY_TOKEN && + token.token_type != yaml_VALUE_TOKEN && + token.token_type != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) + } +} + +/* + * Parse the productions: + * flow_sequence ::= FLOW-SEQUENCE-START + * ******************* + * (flow_sequence_entry FLOW-ENTRY)* + * * ********** + * flow_sequence_entry? + * * + * FLOW-SEQUENCE-END + * ***************** + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+ * * + */ + +func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, + event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type != yaml_FLOW_SEQUENCE_END_TOKEN { + if !first { + if token.token_type == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow sequence", mark, + "did not find expected ',' or ']'", token.start_mark) + } + } + + if token.token_type == yaml_KEY_TOKEN { + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE + *event = yaml_event_t{ + event_type: yaml_MAPPING_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + implicit: true, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + + skip_token(parser) + return true + } else if token.token_type != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + event_type: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true +} + +/* + * Parse the productions: + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+ * *** * + */ + +func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, + event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type != yaml_VALUE_TOKEN && + token.token_type != yaml_FLOW_ENTRY_TOKEN && + token.token_type != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } else { + mark := token.end_mark + skip_token(parser) + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } +} + +/* + * Parse the productions: + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * ***** * + */ + +func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, + event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type == yaml_VALUE_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_FLOW_ENTRY_TOKEN && + token.token_type != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +/* + * Parse the productions: + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+ * * + */ + +func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, + event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + event_type: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, + } + + return true +} + +/* + * Parse the productions: + * flow_mapping ::= FLOW-MAPPING-START + * ****************** + * (flow_mapping_entry FLOW-ENTRY)* + * * ********** + * flow_mapping_entry? + * ****************** + * FLOW-MAPPING-END + * **************** + * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * * *** * + */ + +func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, + event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.token_type != yaml_FLOW_MAPPING_END_TOKEN { + if !first { + if token.token_type == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow mapping", mark, + "did not find expected ',' or '}'", token.start_mark) + } + } + + if token.token_type == yaml_KEY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_VALUE_TOKEN && + token.token_type != yaml_FLOW_ENTRY_TOKEN && + token.token_type != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } else { + parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE + return 
yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } + } else if token.token_type != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + event_type: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true +} + +/* + * Parse the productions: + * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * * ***** * + */ + +func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, + event *yaml_event_t, empty bool) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if empty { + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } + + if token.token_type == yaml_VALUE_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.token_type != yaml_FLOW_ENTRY_TOKEN && + token.token_type != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +/* + * Generate an empty scalar event. + */ + +func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, + mark YAML_mark_t) bool { + *event = yaml_event_t{ + event_type: yaml_SCALAR_EVENT, + start_mark: mark, + end_mark: mark, + value: nil, + implicit: true, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + + return true +} + +/* + * Parse directives. 
+ */ + +func yaml_parser_process_directives(parser *yaml_parser_t, + version_directive_ref **yaml_version_directive_t, + tag_directives_ref *[]yaml_tag_directive_t) bool { + + token := peek_token(parser) + if token == nil { + return false + } + + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + + for token.token_type == yaml_VERSION_DIRECTIVE_TOKEN || + token.token_type == yaml_TAG_DIRECTIVE_TOKEN { + if token.token_type == yaml_VERSION_DIRECTIVE_TOKEN { + if version_directive != nil { + yaml_parser_set_parser_error(parser, + "found duplicate %YAML directive", token.start_mark) + return false + } + if token.major != 1 || + token.minor != 1 { + yaml_parser_set_parser_error(parser, + "found incompatible YAML document", token.start_mark) + return false + } + version_directive = &yaml_version_directive_t{ + major: token.major, + minor: token.minor, + } + } else if token.token_type == yaml_TAG_DIRECTIVE_TOKEN { + value := yaml_tag_directive_t{ + handle: token.value, + prefix: token.prefix, + } + + if !yaml_parser_append_tag_directive(parser, value, false, + token.start_mark) { + return false + } + tag_directives = append(tag_directives, value) + } + + skip_token(parser) + token := peek_token(parser) + if token == nil { + return false + } + } + + for i := range default_tag_directives { + if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { + return false + } + } + + if version_directive_ref != nil { + *version_directive_ref = version_directive + } + if tag_directives_ref != nil { + *tag_directives_ref = tag_directives + } + + return true +} + +/* + * Append a tag directive to the directives stack. 
+ */ + +func yaml_parser_append_tag_directive(parser *yaml_parser_t, + value yaml_tag_directive_t, allow_duplicates bool, mark YAML_mark_t) bool { + for i := range parser.tag_directives { + tag := &parser.tag_directives[i] + if bytes.Equal(value.handle, tag.handle) { + if allow_duplicates { + return true + } + return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) + } + } + + parser.tag_directives = append(parser.tag_directives, value) + return true +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser_test.go new file mode 100644 index 0000000..534ca5e --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/parser_test.go @@ -0,0 +1,81 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "io/ioutil" + "os" + "path/filepath" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var parses = func(filename string) { + It("parses "+filename, func() { + file, err := os.Open(filename) + Expect(err).To(BeNil()) + + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_reader(&parser, file) + + failed := false + event := yaml_event_t{} + + for { + if !yaml_parser_parse(&parser, &event) { + failed = true + println("---", parser.error, parser.problem, parser.context, "line", parser.problem_mark.line, "col", parser.problem_mark.column) + break + } + + if event.event_type == yaml_STREAM_END_EVENT { + break + } + } + + file.Close() + + // msg := "SUCCESS" + // if failed { + // msg = "FAILED" + // if parser.error != yaml_NO_ERROR { + // m := parser.problem_mark + // fmt.Printf("ERROR: (%s) %s @ line: %d col: %d\n", + // parser.context, parser.problem, m.line, m.column) + // } + // } + Expect(failed).To(BeFalse()) + }) +} + +var parseYamls = func(dirname string) { + fileInfos, err := ioutil.ReadDir(dirname) + if err != nil { + panic(err.Error()) + } + + for _, fileInfo := range fileInfos { + if !fileInfo.IsDir() { + parses(filepath.Join(dirname, fileInfo.Name())) + } + } +} + +var _ = Describe("Parser", func() { + parseYamls("fixtures/specification") + parseYamls("fixtures/specification/types") +}) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader.go new file mode 100644 index 0000000..5631da2 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader.go @@ -0,0 +1,465 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "io" +) + +/* + * Set the reader error and return 0. + */ + +func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, + offset int, value int) bool { + parser.error = yaml_READER_ERROR + parser.problem = problem + parser.problem_offset = offset + parser.problem_value = value + + return false +} + +/* + * Byte order marks. + */ +const ( + BOM_UTF8 = "\xef\xbb\xbf" + BOM_UTF16LE = "\xff\xfe" + BOM_UTF16BE = "\xfe\xff" +) + +/* + * Determine the input stream encoding by checking the BOM symbol. If no BOM is + * found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. + */ + +func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { + /* Ensure that we had enough bytes in the raw buffer. */ + for !parser.eof && + len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { + if !yaml_parser_update_raw_buffer(parser) { + return false + } + } + + /* Determine the encoding. 
*/ + raw := parser.raw_buffer + pos := parser.raw_buffer_pos + remaining := len(raw) - pos + if remaining >= 2 && + raw[pos] == BOM_UTF16LE[0] && raw[pos+1] == BOM_UTF16LE[1] { + parser.encoding = yaml_UTF16LE_ENCODING + parser.raw_buffer_pos += 2 + parser.offset += 2 + } else if remaining >= 2 && + raw[pos] == BOM_UTF16BE[0] && raw[pos+1] == BOM_UTF16BE[1] { + parser.encoding = yaml_UTF16BE_ENCODING + parser.raw_buffer_pos += 2 + parser.offset += 2 + } else if remaining >= 3 && + raw[pos] == BOM_UTF8[0] && raw[pos+1] == BOM_UTF8[1] && raw[pos+2] == BOM_UTF8[2] { + parser.encoding = yaml_UTF8_ENCODING + parser.raw_buffer_pos += 3 + parser.offset += 3 + } else { + parser.encoding = yaml_UTF8_ENCODING + } + + return true +} + +/* + * Update the raw buffer. + */ + +func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { + size_read := 0 + + /* Return if the raw buffer is full. */ + if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { + return true + } + + /* Return on EOF. */ + + if parser.eof { + return true + } + + /* Move the remaining bytes in the raw buffer to the beginning. */ + if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { + copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) + } + parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] + parser.raw_buffer_pos = 0 + + /* Call the read handler to fill the buffer. */ + size_read, err := parser.read_handler(parser, + parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) + parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] + + if err == io.EOF { + parser.eof = true + } else if err != nil { + return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), + parser.offset, -1) + } + + return true +} + +/* + * Ensure that the buffer contains at least `length` characters. + * Return 1 on success, 0 on failure. 
+ * + * The length is supposed to be significantly less that the buffer size. + */ + +func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { + /* Read handler must be set. */ + if parser.read_handler == nil { + panic("read handler must be set") + } + + /* If the EOF flag is set and the raw buffer is empty, do nothing. */ + + if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { + return true + } + + /* Return if the buffer contains enough characters. */ + + if parser.unread >= length { + return true + } + + /* Determine the input encoding if it is not known yet. */ + + if parser.encoding == yaml_ANY_ENCODING { + if !yaml_parser_determine_encoding(parser) { + return false + } + } + + /* Move the unread characters to the beginning of the buffer. */ + buffer_end := len(parser.buffer) + if 0 < parser.buffer_pos && + parser.buffer_pos < buffer_end { + copy(parser.buffer, parser.buffer[parser.buffer_pos:]) + buffer_end -= parser.buffer_pos + parser.buffer_pos = 0 + } else if parser.buffer_pos == buffer_end { + buffer_end = 0 + parser.buffer_pos = 0 + } + + parser.buffer = parser.buffer[:cap(parser.buffer)] + + /* Fill the buffer until it has enough characters. */ + first := true + for parser.unread < length { + /* Fill the raw buffer if necessary. */ + + if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { + if !yaml_parser_update_raw_buffer(parser) { + parser.buffer = parser.buffer[:buffer_end] + return false + } + } + first = false + + /* Decode the raw buffer. */ + for parser.raw_buffer_pos != len(parser.raw_buffer) { + var value rune + var w int + + raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos + incomplete := false + + /* Decode the next character. */ + + switch parser.encoding { + case yaml_UTF8_ENCODING: + + /* + * Decode a UTF-8 character. Check RFC 3629 + * (http://www.ietf.org/rfc/rfc3629.txt) for more details. + * + * The following table (taken from the RFC) is used for + * decoding. + * + * Char. 
number range | UTF-8 octet sequence + * (hexadecimal) | (binary) + * --------------------+------------------------------------ + * 0000 0000-0000 007F | 0xxxxxxx + * 0000 0080-0000 07FF | 110xxxxx 10xxxxxx + * 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx + * 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + * + * Additionally, the characters in the range 0xD800-0xDFFF + * are prohibited as they are reserved for use with UTF-16 + * surrogate pairs. + */ + + /* Determine the length of the UTF-8 sequence. */ + + octet := parser.raw_buffer[parser.raw_buffer_pos] + w = width(octet) + + /* Check if the leading octet is valid. */ + + if w == 0 { + return yaml_parser_set_reader_error(parser, + "invalid leading UTF-8 octet", + parser.offset, int(octet)) + } + + /* Check if the raw buffer contains an incomplete character. */ + + if w > raw_unread { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-8 octet sequence", + parser.offset, -1) + } + incomplete = true + break + } + + /* Decode the leading octet. */ + switch { + case octet&0x80 == 0x00: + value = rune(octet & 0x7F) + case octet&0xE0 == 0xC0: + value = rune(octet & 0x1F) + case octet&0xF0 == 0xE0: + value = rune(octet & 0x0F) + case octet&0xF8 == 0xF0: + value = rune(octet & 0x07) + default: + value = 0 + } + + /* Check and decode the trailing octets. */ + + for k := 1; k < w; k++ { + octet = parser.raw_buffer[parser.raw_buffer_pos+k] + + /* Check if the octet is valid. */ + + if (octet & 0xC0) != 0x80 { + return yaml_parser_set_reader_error(parser, + "invalid trailing UTF-8 octet", + parser.offset+k, int(octet)) + } + + /* Decode the octet. */ + + value = (value << 6) + rune(octet&0x3F) + } + + /* Check the length of the sequence against the value. 
*/ + switch { + case w == 1: + case w == 2 && value >= 0x80: + case w == 3 && value >= 0x800: + case w == 4 && value >= 0x10000: + default: + return yaml_parser_set_reader_error(parser, + "invalid length of a UTF-8 sequence", + parser.offset, -1) + } + + /* Check the range of the value. */ + + if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { + return yaml_parser_set_reader_error(parser, + "invalid Unicode character", + parser.offset, int(value)) + } + case yaml_UTF16LE_ENCODING, + yaml_UTF16BE_ENCODING: + + var low, high int + if parser.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + high, low = 1, 0 + } + + /* + * The UTF-16 encoding is not as simple as one might + * naively think. Check RFC 2781 + * (http://www.ietf.org/rfc/rfc2781.txt). + * + * Normally, two subsequent bytes describe a Unicode + * character. However a special technique (called a + * surrogate pair) is used for specifying character + * values larger than 0xFFFF. + * + * A surrogate pair consists of two pseudo-characters: + * high surrogate area (0xD800-0xDBFF) + * low surrogate area (0xDC00-0xDFFF) + * + * The following formulas are used for decoding + * and encoding characters using surrogate pairs: + * + * U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) + * U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) + * W1 = 110110yyyyyyyyyy + * W2 = 110111xxxxxxxxxx + * + * where U is the character value, W1 is the high surrogate + * area, W2 is the low surrogate area. + */ + + /* Check for incomplete UTF-16 character. */ + + if raw_unread < 2 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 character", + parser.offset, -1) + } + incomplete = true + break + } + + /* Get the character. */ + value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) + + /* Check for unexpected low surrogate area. 
*/ + + if (value & 0xFC00) == 0xDC00 { + return yaml_parser_set_reader_error(parser, + "unexpected low surrogate area", + parser.offset, int(value)) + } + + /* Check for a high surrogate area. */ + + if (value & 0xFC00) == 0xD800 { + + w = 4 + + /* Check for incomplete surrogate pair. */ + + if raw_unread < 4 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 surrogate pair", + parser.offset, -1) + } + incomplete = true + break + } + + /* Get the next character. */ + + value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) + + /* Check for a low surrogate area. */ + + if (value2 & 0xFC00) != 0xDC00 { + return yaml_parser_set_reader_error(parser, + "expected low surrogate area", + parser.offset+2, int(value2)) + } + + /* Generate the value of the surrogate pair. */ + + value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) + } else { + w = 2 + } + + break + + default: + panic("Impossible") /* Impossible. */ + } + + /* Check if the raw buffer contains enough bytes to form a character. */ + + if incomplete { + break + } + + /* + * Check if the character is in the allowed range: + * #x9 | #xA | #xD | [#x20-#x7E] (8 bit) + * | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) + * | [#x10000-#x10FFFF] (32 bit) + */ + + if !(value == 0x09 || value == 0x0A || value == 0x0D || + (value >= 0x20 && value <= 0x7E) || + (value == 0x85) || (value >= 0xA0 && value <= 0xD7FF) || + (value >= 0xE000 && value <= 0xFFFD) || + (value >= 0x10000 && value <= 0x10FFFF)) { + return yaml_parser_set_reader_error(parser, + "control characters are not allowed", + parser.offset, int(value)) + } + + /* Move the raw pointers. */ + + parser.raw_buffer_pos += w + parser.offset += w + + /* Finally put the character into the buffer. */ + + /* 0000 0000-0000 007F . 
0xxxxxxx */ + if value <= 0x7F { + parser.buffer[buffer_end] = byte(value) + } else if value <= 0x7FF { + /* 0000 0080-0000 07FF . 110xxxxx 10xxxxxx */ + parser.buffer[buffer_end] = byte(0xC0 + (value >> 6)) + parser.buffer[buffer_end+1] = byte(0x80 + (value & 0x3F)) + } else if value <= 0xFFFF { + /* 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx */ + parser.buffer[buffer_end] = byte(0xE0 + (value >> 12)) + parser.buffer[buffer_end+1] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_end+2] = byte(0x80 + (value & 0x3F)) + } else { + /* 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx */ + parser.buffer[buffer_end] = byte(0xF0 + (value >> 18)) + parser.buffer[buffer_end+1] = byte(0x80 + ((value >> 12) & 0x3F)) + parser.buffer[buffer_end+2] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_end+3] = byte(0x80 + (value & 0x3F)) + } + + buffer_end += w + parser.unread++ + } + + /* On EOF, put NUL into the buffer and return. */ + + if parser.eof { + parser.buffer[buffer_end] = 0 + buffer_end++ + parser.buffer = parser.buffer[:buffer_end] + parser.unread++ + return true + } + + } + + parser.buffer = parser.buffer[:buffer_end] + return true +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader_test.go new file mode 100644 index 0000000..3771ee7 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/reader_test.go @@ -0,0 +1,291 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + // "fmt" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +/* + * Test cases are stolen from + * http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt + */ + +type test_case struct { + title string + test string + result bool +} + +var _ = Describe("Reader", func() { + LONG := 100000 + + Context("UTF8 Sequences", func() { + utf8_sequences := []test_case{ + /* {"title", "test 1|test 2|...|test N!", (0 or 1)}, */ + + {"a simple test", "'test' is '\xd0\xbf\xd1\x80\xd0\xbe\xd0\xb2\xd0\xb5\xd1\x80\xd0\xba\xd0\xb0' in Russian!", true}, + + {"an empty line", "!", true}, + {"u-0 is a control character", "\x00!", false}, + {"u-80 is a control character", "\xc2\x80!", false}, + {"u-800 is valid", "\xe0\xa0\x80!", true}, + {"u-10000 is valid", "\xf0\x90\x80\x80!", true}, + {"5 bytes sequences are not allowed", "\xf8\x88\x80\x80\x80!", false}, + {"6 bytes sequences are not allowed", "\xfc\x84\x80\x80\x80\x80!", false}, + + {"u-7f is a control character", "\x7f!", false}, + {"u-7FF is valid", "\xdf\xbf!", true}, + {"u-FFFF is a control character", "\xef\xbf\xbf!", false}, + {"u-1FFFFF is too large", "\xf7\xbf\xbf\xbf!", false}, + {"u-3FFFFFF is 5 bytes", "\xfb\xbf\xbf\xbf\xbf!", false}, + {"u-7FFFFFFF is 6 bytes", "\xfd\xbf\xbf\xbf\xbf\xbf!", false}, + + {"u-D7FF", "\xed\x9f\xbf!", true}, + {"u-E000", "\xee\x80\x80!", true}, + {"u-FFFD", "\xef\xbf\xbd!", true}, + {"u-10FFFF", "\xf4\x8f\xbf\xbf!", true}, + {"u-110000", "\xf4\x90\x80\x80!", false}, + + {"first continuation byte", "\x80!", false}, + {"last continuation byte", "\xbf!", false}, + + {"2 continuation bytes", "\x80\xbf!", false}, + {"3 continuation bytes", "\x80\xbf\x80!", false}, + {"4 continuation bytes", "\x80\xbf\x80\xbf!", false}, + {"5 continuation bytes", "\x80\xbf\x80\xbf\x80!", false}, + {"6 continuation bytes", "\x80\xbf\x80\xbf\x80\xbf!", false}, + 
{"7 continuation bytes", "\x80\xbf\x80\xbf\x80\xbf\x80!", false}, + + {"sequence of all 64 possible continuation bytes", + "\x80|\x81|\x82|\x83|\x84|\x85|\x86|\x87|\x88|\x89|\x8a|\x8b|\x8c|\x8d|\x8e|\x8f|" + + "\x90|\x91|\x92|\x93|\x94|\x95|\x96|\x97|\x98|\x99|\x9a|\x9b|\x9c|\x9d|\x9e|\x9f|" + + "\xa0|\xa1|\xa2|\xa3|\xa4|\xa5|\xa6|\xa7|\xa8|\xa9|\xaa|\xab|\xac|\xad|\xae|\xaf|" + + "\xb0|\xb1|\xb2|\xb3|\xb4|\xb5|\xb6|\xb7|\xb8|\xb9|\xba|\xbb|\xbc|\xbd|\xbe|\xbf!", false}, + {"32 first bytes of 2-byte sequences {0xc0-0xdf}", + "\xc0 |\xc1 |\xc2 |\xc3 |\xc4 |\xc5 |\xc6 |\xc7 |\xc8 |\xc9 |\xca |\xcb |\xcc |\xcd |\xce |\xcf |" + + "\xd0 |\xd1 |\xd2 |\xd3 |\xd4 |\xd5 |\xd6 |\xd7 |\xd8 |\xd9 |\xda |\xdb |\xdc |\xdd |\xde |\xdf !", false}, + {"16 first bytes of 3-byte sequences {0xe0-0xef}", + "\xe0 |\xe1 |\xe2 |\xe3 |\xe4 |\xe5 |\xe6 |\xe7 |\xe8 |\xe9 |\xea |\xeb |\xec |\xed |\xee |\xef !", false}, + {"8 first bytes of 4-byte sequences {0xf0-0xf7}", "\xf0 |\xf1 |\xf2 |\xf3 |\xf4 |\xf5 |\xf6 |\xf7 !", false}, + {"4 first bytes of 5-byte sequences {0xf8-0xfb}", "\xf8 |\xf9 |\xfa |\xfb !", false}, + {"2 first bytes of 6-byte sequences {0xfc-0xfd}", "\xfc |\xfd !", false}, + + {"sequences with last byte missing {u-0}", + "\xc0|\xe0\x80|\xf0\x80\x80|\xf8\x80\x80\x80|\xfc\x80\x80\x80\x80!", false}, + {"sequences with last byte missing {u-...FF}", + "\xdf|\xef\xbf|\xf7\xbf\xbf|\xfb\xbf\xbf\xbf|\xfd\xbf\xbf\xbf\xbf!", false}, + + {"impossible bytes", "\xfe|\xff|\xfe\xfe\xff\xff!", false}, + + {"overlong sequences {u-2f}", + "\xc0\xaf|\xe0\x80\xaf|\xf0\x80\x80\xaf|\xf8\x80\x80\x80\xaf|\xfc\x80\x80\x80\x80\xaf!", false}, + + {"maximum overlong sequences", + "\xc1\xbf|\xe0\x9f\xbf|\xf0\x8f\xbf\xbf|\xf8\x87\xbf\xbf\xbf|\xfc\x83\xbf\xbf\xbf\xbf!", false}, + + {"overlong representation of the NUL character", + "\xc0\x80|\xe0\x80\x80|\xf0\x80\x80\x80|\xf8\x80\x80\x80\x80|\xfc\x80\x80\x80\x80\x80!", false}, + + {"single UTF-16 surrogates", + 
"\xed\xa0\x80|\xed\xad\xbf|\xed\xae\x80|\xed\xaf\xbf|\xed\xb0\x80|\xed\xbe\x80|\xed\xbf\xbf!", false}, + + {"paired UTF-16 surrogates", + "\xed\xa0\x80\xed\xb0\x80|\xed\xa0\x80\xed\xbf\xbf|\xed\xad\xbf\xed\xb0\x80|" + + "\xed\xad\xbf\xed\xbf\xbf|\xed\xae\x80\xed\xb0\x80|\xed\xae\x80\xed\xbf\xbf|" + + "\xed\xaf\xbf\xed\xb0\x80|\xed\xaf\xbf\xed\xbf\xbf!", false}, + + {"other illegal code positions", "\xef\xbf\xbe|\xef\xbf\xbf!", false}, + } + + check_sequence := func(tc test_case) { + It(tc.title, func() { + start := 0 + end := start + bytes := []byte(tc.test) + + for { + for bytes[end] != '|' && bytes[end] != '!' { + end++ + } + + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_string(&parser, bytes) + result := yaml_parser_update_buffer(&parser, end-start) + Expect(result).To(Equal(tc.result)) + // outcome := '+' + // if result != tc.result { + // outcome = '-' + // } + // fmt.Printf("\t\t %c %s", outcome, tc.title) + // if parser.error == yaml_NO_ERROR { + // fmt.Printf("(no error)\n") + // } else if parser.error == yaml_READER_ERROR { + // if parser.problem_value != -1 { + // fmt.Printf("(reader error: %s: #%X at %d)\n", + // parser.problem, parser.problem_value, parser.problem_offset) + // } else { + // fmt.Printf("(reader error: %s: at %d)\n", + // parser.problem, parser.problem_offset) + // } + // } + + if bytes[end] == '!' 
{ + break + } + + end++ + start = end + yaml_parser_delete(&parser) + } + }) + } + + for _, test := range utf8_sequences { + check_sequence(test) + } + }) + + Context("BOMs", func() { + boms := []test_case{ + /* {"title", "test!", lenth}, */ + {"no bom (utf-8)", "Hi is \xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82!", true}, + {"bom (utf-8)", "\xef\xbb\xbfHi is \xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82!", true}, + {"bom (utf-16-le)", "\xff\xfeH\x00i\x00 \x00i\x00s\x00 \x00\x1f\x04@\x04" + "8\x04" + "2\x04" + "5\x04" + "B\x04!", true}, + {"bom (utf-16-be)", "\xfe\xff\x00H\x00i\x00 \x00i\x00s\x00 \x04\x1f\x04@\x04" + "8\x04" + "2\x04" + "5\x04" + "B!", true}, + } + + check_bom := func(tc test_case) { + It(tc.title, func() { + start := 0 + end := start + bytes := []byte(tc.test) + + for bytes[end] != '!' { + end++ + } + + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_string(&parser, bytes[:end-start]) + result := yaml_parser_update_buffer(&parser, end-start) + Expect(result).To(Equal(tc.result)) + yaml_parser_delete(&parser) + }) + } + + for _, test := range boms { + check_bom(test) + } + + }) + + Context("Long UTF8", func() { + It("parses properly", func() { + buffer := make([]byte, 0, 3+LONG*2) + buffer = append(buffer, '\xef', '\xbb', '\xbf') + for j := 0; j < LONG; j++ { + if j%2 == 1 { + buffer = append(buffer, '\xd0', '\x90') + } else { + buffer = append(buffer, '\xd0', '\xaf') + } + } + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_string(&parser, buffer) + + for k := 0; k < LONG; k++ { + if parser.unread == 0 { + updated := yaml_parser_update_buffer(&parser, 1) + Expect(updated).To(BeTrue()) + // printf("\treader error: %s at %d\n", parser.problem, parser.problem_offset); + } + Expect(parser.unread).NotTo(Equal(0)) + // printf("\tnot enough characters at %d\n", k); + var ch0, ch1 byte + if k%2 == 1 { + ch0 = '\xd0' + ch1 = '\x90' + } else { + ch0 = '\xd0' + ch1 = '\xaf' + } + 
Expect(parser.buffer[parser.buffer_pos]).To(Equal(ch0)) + Expect(parser.buffer[parser.buffer_pos+1]).To(Equal(ch1)) + // printf("\tincorrect UTF-8 sequence: %X %X instead of %X %X\n", + // (int)parser.buffer.pointer[0], (int)parser.buffer.pointer[1], + // (int)ch0, (int)ch1); + + parser.buffer_pos += 2 + parser.unread -= 1 + } + updated := yaml_parser_update_buffer(&parser, 1) + Expect(updated).To(BeTrue()) + // printf("\treader error: %s at %d\n", parser.problem, parser.problem_offset); + yaml_parser_delete(&parser) + }) + }) + + Context("Long UTF16", func() { + It("parses properly", func() { + buffer := make([]byte, 0, 2+LONG*2) + buffer = append(buffer, '\xff', '\xfe') + for j := 0; j < LONG; j++ { + if j%2 == 1 { + buffer = append(buffer, '\x10', '\x04') + } else { + buffer = append(buffer, '/', '\x04') + } + } + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_string(&parser, buffer) + + for k := 0; k < LONG; k++ { + if parser.unread == 0 { + updated := yaml_parser_update_buffer(&parser, 1) + Expect(updated).To(BeTrue()) + // printf("\treader error: %s at %d\n", parser.problem, parser.problem_offset); + } + Expect(parser.unread).NotTo(Equal(0)) + // printf("\tnot enough characters at %d\n", k); + var ch0, ch1 byte + if k%2 == 1 { + ch0 = '\xd0' + ch1 = '\x90' + } else { + ch0 = '\xd0' + ch1 = '\xaf' + } + Expect(parser.buffer[parser.buffer_pos]).To(Equal(ch0)) + Expect(parser.buffer[parser.buffer_pos+1]).To(Equal(ch1)) + // printf("\tincorrect UTF-8 sequence: %X %X instead of %X %X\n", + // (int)parser.buffer.pointer[0], (int)parser.buffer.pointer[1], + // (int)ch0, (int)ch1); + + parser.buffer_pos += 2 + parser.unread -= 1 + } + updated := yaml_parser_update_buffer(&parser, 1) + Expect(updated).To(BeTrue()) + // printf("\treader error: %s at %d\n", parser.problem, parser.problem_offset); + yaml_parser_delete(&parser) + }) + }) +}) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver.go 
b/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver.go new file mode 100644 index 0000000..fb9e8be --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver.go @@ -0,0 +1,449 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "bytes" + "encoding/base64" + "fmt" + "math" + "reflect" + "regexp" + "strconv" + "strings" + "time" +) + +var byteSliceType = reflect.TypeOf([]byte(nil)) + +var binary_tags = [][]byte{[]byte("!binary"), []byte(yaml_BINARY_TAG)} +var bool_values map[string]bool +var null_values map[string]bool + +var signs = []byte{'-', '+'} +var nulls = []byte{'~', 'n', 'N'} +var bools = []byte{'t', 'T', 'f', 'F', 'y', 'Y', 'n', 'N', 'o', 'O'} + +var timestamp_regexp *regexp.Regexp +var ymd_regexp *regexp.Regexp + +func init() { + bool_values = make(map[string]bool) + bool_values["y"] = true + bool_values["yes"] = true + bool_values["n"] = false + bool_values["no"] = false + bool_values["true"] = true + bool_values["false"] = false + bool_values["on"] = true + bool_values["off"] = false + + null_values = make(map[string]bool) + null_values["~"] = true + null_values["null"] = true + null_values["Null"] = true + null_values["NULL"] = true + + timestamp_regexp = regexp.MustCompile("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)(?:(?:[Tt]|[ \t]+)([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(?:\\.([0-9]*))?(?:[ \t]*(?:Z|([-+][0-9][0-9]?)(?::([0-9][0-9])?)?))?)?$") + ymd_regexp = 
regexp.MustCompile("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)$") +} + +func resolve(event yaml_event_t, v reflect.Value, useNumber bool) (string, error) { + val := string(event.value) + + if null_values[val] { + v.Set(reflect.Zero(v.Type())) + return yaml_NULL_TAG, nil + } + + switch v.Kind() { + case reflect.String: + if useNumber && v.Type() == numberType { + tag, i := resolveInterface(event, useNumber) + if n, ok := i.(Number); ok { + v.Set(reflect.ValueOf(n)) + return tag, nil + } + return "", fmt.Errorf("Not a number: '%s' at %s", event.value, event.start_mark) + } + + return resolve_string(val, v, event) + case reflect.Bool: + return resolve_bool(val, v, event) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return resolve_int(val, v, useNumber, event) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return resolve_uint(val, v, useNumber, event) + case reflect.Float32, reflect.Float64: + return resolve_float(val, v, useNumber, event) + case reflect.Interface: + _, i := resolveInterface(event, useNumber) + if i != nil { + v.Set(reflect.ValueOf(i)) + } else { + v.Set(reflect.Zero(v.Type())) + } + + case reflect.Struct: + return resolve_time(val, v, event) + case reflect.Slice: + if v.Type() != byteSliceType { + return "", fmt.Errorf("Cannot resolve %s into %s at %s", val, v.String(), event.start_mark) + } + b, err := decode_binary(event.value, event) + if err != nil { + return "", err + } + + v.Set(reflect.ValueOf(b)) + default: + return "", fmt.Errorf("Unknown resolution for '%s' using %s at %s", val, v.String(), event.start_mark) + } + + return yaml_STR_TAG, nil +} + +func hasBinaryTag(event yaml_event_t) bool { + for _, tag := range binary_tags { + if bytes.Equal(event.tag, tag) { + return true + } + } + return false +} + +func decode_binary(value []byte, event yaml_event_t) ([]byte, error) { + b := make([]byte, base64.StdEncoding.DecodedLen(len(value))) + n, err := 
base64.StdEncoding.Decode(b, value) + if err != nil { + return nil, fmt.Errorf("Invalid base64 text: '%s' at %s", string(b), event.start_mark) + } + return b[:n], nil +} + +func resolve_string(val string, v reflect.Value, event yaml_event_t) (string, error) { + if len(event.tag) > 0 { + if hasBinaryTag(event) { + b, err := decode_binary(event.value, event) + if err != nil { + return "", err + } + val = string(b) + } + } + v.SetString(val) + return yaml_STR_TAG, nil +} + +func resolve_bool(val string, v reflect.Value, event yaml_event_t) (string, error) { + b, found := bool_values[strings.ToLower(val)] + if !found { + return "", fmt.Errorf("Invalid boolean: '%s' at %s", val, event.start_mark) + } + + v.SetBool(b) + return yaml_BOOL_TAG, nil +} + +func resolve_int(val string, v reflect.Value, useNumber bool, event yaml_event_t) (string, error) { + original := val + val = strings.Replace(val, "_", "", -1) + var value uint64 + + isNumberValue := v.Type() == numberType + + sign := int64(1) + if val[0] == '-' { + sign = -1 + val = val[1:] + } else if val[0] == '+' { + val = val[1:] + } + + base := 0 + if val == "0" { + if isNumberValue { + v.SetString("0") + } else { + v.Set(reflect.Zero(v.Type())) + } + + return yaml_INT_TAG, nil + } + + if strings.HasPrefix(val, "0o") { + base = 8 + val = val[2:] + } + + value, err := strconv.ParseUint(val, base, 64) + if err != nil { + return "", fmt.Errorf("Invalid integer: '%s' at %s", original, event.start_mark) + } + + var val64 int64 + if value <= math.MaxInt64 { + val64 = int64(value) + if sign == -1 { + val64 = -val64 + } + } else if sign == -1 && value == uint64(math.MaxInt64)+1 { + val64 = math.MinInt64 + } else { + return "", fmt.Errorf("Invalid integer: '%s' at %s", original, event.start_mark) + } + + if isNumberValue { + v.SetString(strconv.FormatInt(val64, 10)) + } else { + if v.OverflowInt(val64) { + return "", fmt.Errorf("Invalid integer: '%s' at %s", original, event.start_mark) + } + v.SetInt(val64) + } + + return 
yaml_INT_TAG, nil +} + +func resolve_uint(val string, v reflect.Value, useNumber bool, event yaml_event_t) (string, error) { + original := val + val = strings.Replace(val, "_", "", -1) + var value uint64 + + isNumberValue := v.Type() == numberType + + if val[0] == '-' { + return "", fmt.Errorf("Unsigned int with negative value: '%s' at %s", original, event.start_mark) + } + + if val[0] == '+' { + val = val[1:] + } + + base := 0 + if val == "0" { + if isNumberValue { + v.SetString("0") + } else { + v.Set(reflect.Zero(v.Type())) + } + + return yaml_INT_TAG, nil + } + + if strings.HasPrefix(val, "0o") { + base = 8 + val = val[2:] + } + + value, err := strconv.ParseUint(val, base, 64) + if err != nil { + return "", fmt.Errorf("Invalid unsigned integer: '%s' at %s", val, event.start_mark) + } + + if isNumberValue { + v.SetString(strconv.FormatUint(value, 10)) + } else { + if v.OverflowUint(value) { + return "", fmt.Errorf("Invalid unsigned integer: '%s' at %s", val, event.start_mark) + } + + v.SetUint(value) + } + + return yaml_INT_TAG, nil +} + +func resolve_float(val string, v reflect.Value, useNumber bool, event yaml_event_t) (string, error) { + val = strings.Replace(val, "_", "", -1) + var value float64 + + isNumberValue := v.Type() == numberType + typeBits := 64 + if !isNumberValue { + typeBits = v.Type().Bits() + } + + sign := 1 + if val[0] == '-' { + sign = -1 + val = val[1:] + } else if val[0] == '+' { + val = val[1:] + } + + valLower := strings.ToLower(val) + if valLower == ".inf" { + value = math.Inf(sign) + } else if valLower == ".nan" { + value = math.NaN() + } else { + var err error + value, err = strconv.ParseFloat(val, typeBits) + value *= float64(sign) + + if err != nil { + return "", fmt.Errorf("Invalid float: '%s' at %s", val, event.start_mark) + } + } + + if isNumberValue { + v.SetString(strconv.FormatFloat(value, 'g', -1, typeBits)) + } else { + if v.OverflowFloat(value) { + return "", fmt.Errorf("Invalid float: '%s' at %s", val, event.start_mark) + 
} + + v.SetFloat(value) + } + + return yaml_FLOAT_TAG, nil +} + +func resolve_time(val string, v reflect.Value, event yaml_event_t) (string, error) { + var parsedTime time.Time + matches := ymd_regexp.FindStringSubmatch(val) + if len(matches) > 0 { + year, _ := strconv.Atoi(matches[1]) + month, _ := strconv.Atoi(matches[2]) + day, _ := strconv.Atoi(matches[3]) + parsedTime = time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC) + } else { + matches = timestamp_regexp.FindStringSubmatch(val) + if len(matches) == 0 { + return "", fmt.Errorf("Invalid timestamp: '%s' at %s", val, event.start_mark) + } + + year, _ := strconv.Atoi(matches[1]) + month, _ := strconv.Atoi(matches[2]) + day, _ := strconv.Atoi(matches[3]) + hour, _ := strconv.Atoi(matches[4]) + min, _ := strconv.Atoi(matches[5]) + sec, _ := strconv.Atoi(matches[6]) + + nsec := 0 + if matches[7] != "" { + millis, _ := strconv.Atoi(matches[7]) + nsec = int(time.Duration(millis) * time.Millisecond) + } + + loc := time.UTC + if matches[8] != "" { + sign := matches[8][0] + hr, _ := strconv.Atoi(matches[8][1:]) + min := 0 + if matches[9] != "" { + min, _ = strconv.Atoi(matches[9]) + } + + zoneOffset := (hr*60 + min) * 60 + if sign == '-' { + zoneOffset = -zoneOffset + } + + loc = time.FixedZone("", zoneOffset) + } + parsedTime = time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc) + } + + v.Set(reflect.ValueOf(parsedTime)) + return "", nil +} + +func resolveInterface(event yaml_event_t, useNumber bool) (string, interface{}) { + val := string(event.value) + if len(event.tag) == 0 && !event.implicit { + return "", val + } + + if len(val) == 0 { + return yaml_NULL_TAG, nil + } + + var result interface{} + + sign := false + c := val[0] + switch { + case bytes.IndexByte(signs, c) != -1: + sign = true + fallthrough + case c >= '0' && c <= '9': + i := int64(0) + result = &i + if useNumber { + var n Number + result = &n + } + + v := reflect.ValueOf(result).Elem() + if _, err := resolve_int(val, v, 
useNumber, event); err == nil { + return yaml_INT_TAG, v.Interface() + } + + f := float64(0) + result = &f + if useNumber { + var n Number + result = &n + } + + v = reflect.ValueOf(result).Elem() + if _, err := resolve_float(val, v, useNumber, event); err == nil { + return yaml_FLOAT_TAG, v.Interface() + } + + if !sign { + t := time.Time{} + if _, err := resolve_time(val, reflect.ValueOf(&t).Elem(), event); err == nil { + return "", t + } + } + case bytes.IndexByte(nulls, c) != -1: + if null_values[val] { + return yaml_NULL_TAG, nil + } + b := false + if _, err := resolve_bool(val, reflect.ValueOf(&b).Elem(), event); err == nil { + return yaml_BOOL_TAG, b + } + case c == '.': + f := float64(0) + result = &f + if useNumber { + var n Number + result = &n + } + + v := reflect.ValueOf(result).Elem() + if _, err := resolve_float(val, v, useNumber, event); err == nil { + return yaml_FLOAT_TAG, v.Interface() + } + case bytes.IndexByte(bools, c) != -1: + b := false + if _, err := resolve_bool(val, reflect.ValueOf(&b).Elem(), event); err == nil { + return yaml_BOOL_TAG, b + } + } + + if hasBinaryTag(event) { + bytes, err := decode_binary(event.value, event) + if err == nil { + return yaml_BINARY_TAG, bytes + } + } + + return yaml_STR_TAG, val +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver_test.go new file mode 100644 index 0000000..88e61fd --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/resolver_test.go @@ -0,0 +1,665 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "math" + "reflect" + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("Resolver", func() { + var event yaml_event_t + + var nulls = []string{"~", "null", "Null", "NULL"} + + BeforeEach(func() { + event = yaml_event_t{} + }) + + Context("Resolve", func() { + Context("Implicit events", func() { + checkNulls := func(f func()) { + for _, null := range nulls { + event = yaml_event_t{implicit: true} + event.value = []byte(null) + f() + } + } + + BeforeEach(func() { + event.implicit = true + }) + + Context("String", func() { + It("resolves a string", func() { + aString := "" + v := reflect.ValueOf(&aString) + event.value = []byte("abc") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_STR_TAG)) + Expect(aString).To(Equal("abc")) + }) + + It("resolves the empty string", func() { + aString := "abc" + v := reflect.ValueOf(&aString) + event.value = []byte("") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_STR_TAG)) + Expect(aString).To(Equal("")) + + }) + + It("resolves null", func() { + checkNulls(func() { + aString := "abc" + v := reflect.ValueOf(&aString) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(aString).To(Equal("")) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + aString := "abc" + pString := &aString + v := reflect.ValueOf(&pString) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pString).To(BeNil()) + }) + }) + + }) + + Context("Booleans", func() { + match_bool := func(val string, expected bool) { + b := !expected + + v := reflect.ValueOf(&b) + event.value = 
[]byte(val) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_BOOL_TAG)) + Expect(b).To(Equal(expected)) + } + + It("resolves on", func() { + match_bool("on", true) + match_bool("ON", true) + }) + + It("resolves off", func() { + match_bool("off", false) + match_bool("OFF", false) + }) + + It("resolves true", func() { + match_bool("true", true) + match_bool("TRUE", true) + }) + + It("resolves false", func() { + match_bool("false", false) + match_bool("FALSE", false) + }) + + It("resolves yes", func() { + match_bool("yes", true) + match_bool("YES", true) + }) + + It("resolves no", func() { + match_bool("no", false) + match_bool("NO", false) + }) + + It("reports an error otherwise", func() { + b := true + v := reflect.ValueOf(&b) + event.value = []byte("fail") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Invalid boolean: 'fail' at line 0, column 0")) + }) + + It("resolves null", func() { + checkNulls(func() { + b := true + v := reflect.ValueOf(&b) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(b).To(BeFalse()) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + b := true + pb := &b + v := reflect.ValueOf(&pb) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pb).To(BeNil()) + }) + }) + }) + + Context("Ints", func() { + It("simple ints", func() { + i := 0 + v := reflect.ValueOf(&i) + event.value = []byte("1234") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(1234)) + }) + + It("positive ints", func() { + i := int16(0) + v := reflect.ValueOf(&i) + event.value = []byte("+678") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + 
Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(int16(678))) + }) + + It("negative ints", func() { + i := int32(0) + v := reflect.ValueOf(&i) + event.value = []byte("-2345") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(int32(-2345))) + }) + + It("base 8", func() { + i := 0 + v := reflect.ValueOf(&i) + event.value = []byte("0o12") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(10)) + }) + + It("base 16", func() { + i := 0 + v := reflect.ValueOf(&i) + event.value = []byte("0xff") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(255)) + }) + + It("fails on overflow", func() { + i := int8(0) + v := reflect.ValueOf(&i) + event.value = []byte("2345") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Invalid integer: '2345' at line 0, column 0")) + }) + + It("fails on invalid int", func() { + i := 0 + v := reflect.ValueOf(&i) + event.value = []byte("234f") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Invalid integer: '234f' at line 0, column 0")) + }) + + It("resolves null", func() { + checkNulls(func() { + i := 1 + v := reflect.ValueOf(&i) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(i).To(Equal(0)) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + i := 1 + pi := &i + v := reflect.ValueOf(&pi) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pi).To(BeNil()) + }) + }) + + It("returns a Number", func() { + var i Number + v := reflect.ValueOf(&i) + + tag, err := 
resolve_int("12345", v.Elem(), true, event) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(Number("12345"))) + Expect(i.Int64()).To(Equal(int64(12345))) + + event.value = []byte("1234") + tag, err = resolve(event, v.Elem(), true) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(Number("1234"))) + }) + }) + + Context("UInts", func() { + It("resolves simple uints", func() { + i := uint(0) + v := reflect.ValueOf(&i) + event.value = []byte("1234") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(uint(1234))) + }) + + It("resolves positive uints", func() { + i := uint16(0) + v := reflect.ValueOf(&i) + event.value = []byte("+678") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(uint16(678))) + }) + + It("base 8", func() { + i := uint(0) + v := reflect.ValueOf(&i) + event.value = []byte("0o12") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(uint(10))) + }) + + It("base 16", func() { + i := uint(0) + v := reflect.ValueOf(&i) + event.value = []byte("0xff") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(uint(255))) + }) + + It("fails with negative ints", func() { + i := uint(0) + v := reflect.ValueOf(&i) + event.value = []byte("-2345") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Unsigned int with negative value: '-2345' at line 0, column 0")) + }) + + It("fails on overflow", func() { + i := uint8(0) + v := reflect.ValueOf(&i) + event.value = []byte("2345") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + 
Expect(err.Error()).To(Equal("Invalid unsigned integer: '2345' at line 0, column 0")) + }) + + It("resolves null", func() { + checkNulls(func() { + i := uint(1) + v := reflect.ValueOf(&i) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(i).To(Equal(uint(0))) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + i := uint(1) + pi := &i + v := reflect.ValueOf(&pi) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pi).To(BeNil()) + }) + }) + + It("returns a Number", func() { + var i Number + v := reflect.ValueOf(&i) + + tag, err := resolve_uint("12345", v.Elem(), true, event) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(Number("12345"))) + + event.value = []byte("1234") + tag, err = resolve(event, v.Elem(), true) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_INT_TAG)) + Expect(i).To(Equal(Number("1234"))) + }) + }) + + Context("Floats", func() { + It("float32", func() { + f := float32(0) + v := reflect.ValueOf(&f) + event.value = []byte("2345.01") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(f).To(Equal(float32(2345.01))) + }) + + It("float64", func() { + f := float64(0) + v := reflect.ValueOf(&f) + event.value = []byte("-456456.01") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(f).To(Equal(float64(-456456.01))) + }) + + It("+inf", func() { + f := float64(0) + v := reflect.ValueOf(&f) + event.value = []byte("+.inf") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(f).To(Equal(math.Inf(1))) + }) + + It("-inf", func() { + f := float32(0) + v := reflect.ValueOf(&f) + 
event.value = []byte("-.inf") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(f).To(Equal(float32(math.Inf(-1)))) + }) + + It("nan", func() { + f := float64(0) + v := reflect.ValueOf(&f) + event.value = []byte(".NaN") + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(math.IsNaN(f)).To(BeTrue()) + }) + + It("fails on overflow", func() { + i := float32(0) + v := reflect.ValueOf(&i) + event.value = []byte("123e10000") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Invalid float: '123e10000' at line 0, column 0")) + }) + + It("fails on invalid float", func() { + i := float32(0) + v := reflect.ValueOf(&i) + event.value = []byte("123e1a") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Invalid float: '123e1a' at line 0, column 0")) + }) + + It("resolves null", func() { + checkNulls(func() { + f := float64(1) + v := reflect.ValueOf(&f) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(f).To(Equal(0.0)) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + f := float64(1) + pf := &f + v := reflect.ValueOf(&pf) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pf).To(BeNil()) + }) + }) + + It("returns a Number", func() { + var i Number + v := reflect.ValueOf(&i) + + tag, err := resolve_float("12.345", v.Elem(), true, event) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(i).To(Equal(Number("12.345"))) + Expect(i.Float64()).To(Equal(12.345)) + + event.value = []byte("1.234") + tag, err = resolve(event, v.Elem(), true) + Expect(err).NotTo(HaveOccurred()) + 
Expect(tag).To(Equal(yaml_FLOAT_TAG)) + Expect(i).To(Equal(Number("1.234"))) + }) + }) + + Context("Timestamps", func() { + parse_date := func(val string, date time.Time) { + d := time.Now() + v := reflect.ValueOf(&d) + event.value = []byte(val) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal("")) + Expect(d).To(Equal(date)) + } + + It("date", func() { + parse_date("2002-12-14", time.Date(2002, time.December, 14, 0, 0, 0, 0, time.UTC)) + }) + + It("canonical", func() { + parse_date("2001-12-15T02:59:43.1Z", time.Date(2001, time.December, 15, 2, 59, 43, int(1*time.Millisecond), time.UTC)) + }) + + It("iso8601", func() { + parse_date("2001-12-14t21:59:43.10-05:00", time.Date(2001, time.December, 14, 21, 59, 43, int(10*time.Millisecond), time.FixedZone("", -5*3600))) + }) + + It("space separated", func() { + parse_date("2001-12-14 21:59:43.10 -5", time.Date(2001, time.December, 14, 21, 59, 43, int(10*time.Millisecond), time.FixedZone("", -5*3600))) + }) + + It("no time zone", func() { + parse_date("2001-12-15 2:59:43.10", time.Date(2001, time.December, 15, 2, 59, 43, int(10*time.Millisecond), time.UTC)) + }) + + It("resolves null", func() { + checkNulls(func() { + d := time.Now() + v := reflect.ValueOf(&d) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(d).To(Equal(time.Time{})) + }) + }) + + It("resolves null pointers", func() { + checkNulls(func() { + d := time.Now() + pd := &d + v := reflect.ValueOf(&pd) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_NULL_TAG)) + Expect(pd).To(BeNil()) + }) + }) + }) + + Context("Binary tag", func() { + It("string", func() { + checkNulls(func() { + event.value = []byte("YWJjZGVmZw==") + event.tag = []byte("!binary") + aString := "" + v := reflect.ValueOf(&aString) + + tag, err := resolve(event, v.Elem(), false) + 
Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_STR_TAG)) + Expect(aString).To(Equal("abcdefg")) + }) + }) + + It("[]byte", func() { + checkNulls(func() { + event.value = []byte("YWJjZGVmZw==") + event.tag = []byte("!binary") + bytes := []byte(nil) + v := reflect.ValueOf(&bytes) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_STR_TAG)) + Expect(bytes).To(Equal([]byte("abcdefg"))) + }) + }) + + It("returns a []byte when provided no hints", func() { + checkNulls(func() { + event.value = []byte("YWJjZGVmZw==") + event.tag = []byte("!binary") + var intf interface{} + v := reflect.ValueOf(&intf) + + tag, err := resolve(event, v.Elem(), false) + Expect(err).NotTo(HaveOccurred()) + Expect(tag).To(Equal(yaml_STR_TAG)) + Expect(intf).To(Equal([]byte("abcdefg"))) + }) + }) + }) + + It("fails to resolve a pointer", func() { + aString := "" + pString := &aString + v := reflect.ValueOf(&pString) + event.value = []byte("abc") + + _, err := resolve(event, v.Elem(), false) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("Unknown resolution for 'abc' using <*string Value> at line 0, column 0")) + }) + }) + + Context("Not an implicit event && no tag", func() { + It("bool returns a string", func() { + event.value = []byte("on") + + tag, result := resolveInterface(event, false) + Expect(result).To(Equal("on")) + Expect(tag).To(Equal("")) + }) + + It("number returns a string", func() { + event.value = []byte("1234") + + tag, result := resolveInterface(event, false) + Expect(result).To(Equal("1234")) + Expect(tag).To(Equal("")) + }) + + It("returns the empty string", func() { + event.value = []byte("") + // event.implicit = true + + tag, result := resolveInterface(event, false) + Expect(result).To(Equal("")) + Expect(tag).To(Equal("")) + }) + }) + }) +}) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/run_parser.go 
b/vendor/github.com/cloudfoundry-incubator/candiedyaml/run_parser.go new file mode 100644 index 0000000..25c2981 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/run_parser.go @@ -0,0 +1,62 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "fmt" + "os" +) + +func Run_parser(cmd string, args []string) { + for i := 0; i < len(args); i++ { + fmt.Printf("[%d] Scanning '%s'", i, args[i]) + file, err := os.Open(args[i]) + if err != nil { + panic(fmt.Sprintf("Invalid file '%s': %s", args[i], err.Error())) + } + + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_reader(&parser, file) + + failed := false + token := yaml_token_t{} + count := 0 + for { + if !yaml_parser_scan(&parser, &token) { + failed = true + break + } + + if token.token_type == yaml_STREAM_END_TOKEN { + break + } + count++ + } + + file.Close() + + msg := "SUCCESS" + if failed { + msg = "FAILED" + if parser.error != yaml_NO_ERROR { + m := parser.problem_mark + fmt.Printf("ERROR: (%s) %s @ line: %d col: %d\n", + parser.context, parser.problem, m.line, m.column) + } + } + fmt.Printf("%s (%d tokens)\n", msg, count) + } +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner.go new file mode 100644 index 0000000..f856a56 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner.go @@ -0,0 +1,3318 @@ +/* +Licensed under the 
Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "bytes" +) + +/* + * Introduction + * ************ + * + * The following notes assume that you are familiar with the YAML specification + * (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in + * some cases we are less restrictive that it requires. + * + * The process of transforming a YAML stream into a sequence of events is + * divided on two steps: Scanning and Parsing. + * + * The Scanner transforms the input stream into a sequence of tokens, while the + * parser transform the sequence of tokens produced by the Scanner into a + * sequence of parsing events. + * + * The Scanner is rather clever and complicated. The Parser, on the contrary, + * is a straightforward implementation of a recursive-descendant parser (or, + * LL(1) parser, as it is usually called). + * + * Actually there are two issues of Scanning that might be called "clever", the + * rest is quite straightforward. The issues are "block collection start" and + * "simple keys". Both issues are explained below in details. + * + * Here the Scanning step is explained and implemented. We start with the list + * of all the tokens produced by the Scanner together with short descriptions. + * + * Now, tokens: + * + * STREAM-START(encoding) # The stream start. + * STREAM-END # The stream end. + * VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. + * TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. 
+ * DOCUMENT-START # '---' + * DOCUMENT-END # '...' + * BLOCK-SEQUENCE-START # Indentation increase denoting a block + * BLOCK-MAPPING-START # sequence or a block mapping. + * BLOCK-END # Indentation decrease. + * FLOW-SEQUENCE-START # '[' + * FLOW-SEQUENCE-END # ']' + * BLOCK-SEQUENCE-START # '{' + * BLOCK-SEQUENCE-END # '}' + * BLOCK-ENTRY # '-' + * FLOW-ENTRY # ',' + * KEY # '?' or nothing (simple keys). + * VALUE # ':' + * ALIAS(anchor) # '*anchor' + * ANCHOR(anchor) # '&anchor' + * TAG(handle,suffix) # '!handle!suffix' + * SCALAR(value,style) # A scalar. + * + * The following two tokens are "virtual" tokens denoting the beginning and the + * end of the stream: + * + * STREAM-START(encoding) + * STREAM-END + * + * We pass the information about the input stream encoding with the + * STREAM-START token. + * + * The next two tokens are responsible for tags: + * + * VERSION-DIRECTIVE(major,minor) + * TAG-DIRECTIVE(handle,prefix) + * + * Example: + * + * %YAML 1.1 + * %TAG ! !foo + * %TAG !yaml! tag:yaml.org,2002: + * --- + * + * The correspoding sequence of tokens: + * + * STREAM-START(utf-8) + * VERSION-DIRECTIVE(1,1) + * TAG-DIRECTIVE("!","!foo") + * TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") + * DOCUMENT-START + * STREAM-END + * + * Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole + * line. + * + * The document start and end indicators are represented by: + * + * DOCUMENT-START + * DOCUMENT-END + * + * Note that if a YAML stream contains an implicit document (without '---' + * and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be + * produced. + * + * In the following examples, we present whole documents together with the + * produced tokens. + * + * 1. An implicit document: + * + * 'a scalar' + * + * Tokens: + * + * STREAM-START(utf-8) + * SCALAR("a scalar",single-quoted) + * STREAM-END + * + * 2. An explicit document: + * + * --- + * 'a scalar' + * ... 
+ * + * Tokens: + * + * STREAM-START(utf-8) + * DOCUMENT-START + * SCALAR("a scalar",single-quoted) + * DOCUMENT-END + * STREAM-END + * + * 3. Several documents in a stream: + * + * 'a scalar' + * --- + * 'another scalar' + * --- + * 'yet another scalar' + * + * Tokens: + * + * STREAM-START(utf-8) + * SCALAR("a scalar",single-quoted) + * DOCUMENT-START + * SCALAR("another scalar",single-quoted) + * DOCUMENT-START + * SCALAR("yet another scalar",single-quoted) + * STREAM-END + * + * We have already introduced the SCALAR token above. The following tokens are + * used to describe aliases, anchors, tag, and scalars: + * + * ALIAS(anchor) + * ANCHOR(anchor) + * TAG(handle,suffix) + * SCALAR(value,style) + * + * The following series of examples illustrate the usage of these tokens: + * + * 1. A recursive sequence: + * + * &A [ *A ] + * + * Tokens: + * + * STREAM-START(utf-8) + * ANCHOR("A") + * FLOW-SEQUENCE-START + * ALIAS("A") + * FLOW-SEQUENCE-END + * STREAM-END + * + * 2. A tagged scalar: + * + * !!float "3.14" # A good approximation. + * + * Tokens: + * + * STREAM-START(utf-8) + * TAG("!!","float") + * SCALAR("3.14",double-quoted) + * STREAM-END + * + * 3. Various scalar styles: + * + * --- # Implicit empty plain scalars do not produce tokens. + * --- a plain scalar + * --- 'a single-quoted scalar' + * --- "a double-quoted scalar" + * --- |- + * a literal scalar + * --- >- + * a folded + * scalar + * + * Tokens: + * + * STREAM-START(utf-8) + * DOCUMENT-START + * DOCUMENT-START + * SCALAR("a plain scalar",plain) + * DOCUMENT-START + * SCALAR("a single-quoted scalar",single-quoted) + * DOCUMENT-START + * SCALAR("a double-quoted scalar",double-quoted) + * DOCUMENT-START + * SCALAR("a literal scalar",literal) + * DOCUMENT-START + * SCALAR("a folded scalar",folded) + * STREAM-END + * + * Now it's time to review collection-related tokens. 
We will start with + * flow collections: + * + * FLOW-SEQUENCE-START + * FLOW-SEQUENCE-END + * FLOW-MAPPING-START + * FLOW-MAPPING-END + * FLOW-ENTRY + * KEY + * VALUE + * + * The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and + * FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' + * correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the + * indicators '?' and ':', which are used for denoting mapping keys and values, + * are represented by the KEY and VALUE tokens. + * + * The following examples show flow collections: + * + * 1. A flow sequence: + * + * [item 1, item 2, item 3] + * + * Tokens: + * + * STREAM-START(utf-8) + * FLOW-SEQUENCE-START + * SCALAR("item 1",plain) + * FLOW-ENTRY + * SCALAR("item 2",plain) + * FLOW-ENTRY + * SCALAR("item 3",plain) + * FLOW-SEQUENCE-END + * STREAM-END + * + * 2. A flow mapping: + * + * { + * a simple key: a value, # Note that the KEY token is produced. + * ? a complex key: another value, + * } + * + * Tokens: + * + * STREAM-START(utf-8) + * FLOW-MAPPING-START + * KEY + * SCALAR("a simple key",plain) + * VALUE + * SCALAR("a value",plain) + * FLOW-ENTRY + * KEY + * SCALAR("a complex key",plain) + * VALUE + * SCALAR("another value",plain) + * FLOW-ENTRY + * FLOW-MAPPING-END + * STREAM-END + * + * A simple key is a key which is not denoted by the '?' indicator. Note that + * the Scanner still produce the KEY token whenever it encounters a simple key. + * + * For scanning block collections, the following tokens are used (note that we + * repeat KEY and VALUE here): + * + * BLOCK-SEQUENCE-START + * BLOCK-MAPPING-START + * BLOCK-END + * BLOCK-ENTRY + * KEY + * VALUE + * + * The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation + * increase that precedes a block collection (cf. the INDENT token in Python). + * The token BLOCK-END denote indentation decrease that ends a block collection + * (cf. the DEDENT token in Python). 
However YAML has some syntax pecularities + * that makes detections of these tokens more complex. + * + * The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators + * '-', '?', and ':' correspondingly. + * + * The following examples show how the tokens BLOCK-SEQUENCE-START, + * BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: + * + * 1. Block sequences: + * + * - item 1 + * - item 2 + * - + * - item 3.1 + * - item 3.2 + * - + * key 1: value 1 + * key 2: value 2 + * + * Tokens: + * + * STREAM-START(utf-8) + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * SCALAR("item 1",plain) + * BLOCK-ENTRY + * SCALAR("item 2",plain) + * BLOCK-ENTRY + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * SCALAR("item 3.1",plain) + * BLOCK-ENTRY + * SCALAR("item 3.2",plain) + * BLOCK-END + * BLOCK-ENTRY + * BLOCK-MAPPING-START + * KEY + * SCALAR("key 1",plain) + * VALUE + * SCALAR("value 1",plain) + * KEY + * SCALAR("key 2",plain) + * VALUE + * SCALAR("value 2",plain) + * BLOCK-END + * BLOCK-END + * STREAM-END + * + * 2. Block mappings: + * + * a simple key: a value # The KEY token is produced here. + * ? a complex key + * : another value + * a mapping: + * key 1: value 1 + * key 2: value 2 + * a sequence: + * - item 1 + * - item 2 + * + * Tokens: + * + * STREAM-START(utf-8) + * BLOCK-MAPPING-START + * KEY + * SCALAR("a simple key",plain) + * VALUE + * SCALAR("a value",plain) + * KEY + * SCALAR("a complex key",plain) + * VALUE + * SCALAR("another value",plain) + * KEY + * SCALAR("a mapping",plain) + * BLOCK-MAPPING-START + * KEY + * SCALAR("key 1",plain) + * VALUE + * SCALAR("value 1",plain) + * KEY + * SCALAR("key 2",plain) + * VALUE + * SCALAR("value 2",plain) + * BLOCK-END + * KEY + * SCALAR("a sequence",plain) + * VALUE + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * SCALAR("item 1",plain) + * BLOCK-ENTRY + * SCALAR("item 2",plain) + * BLOCK-END + * BLOCK-END + * STREAM-END + * + * YAML does not always require to start a new block collection from a new + * line. 
If the current line contains only '-', '?', and ':' indicators, a new + * block collection may start at the current line. The following examples + * illustrate this case: + * + * 1. Collections in a sequence: + * + * - - item 1 + * - item 2 + * - key 1: value 1 + * key 2: value 2 + * - ? complex key + * : complex value + * + * Tokens: + * + * STREAM-START(utf-8) + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * SCALAR("item 1",plain) + * BLOCK-ENTRY + * SCALAR("item 2",plain) + * BLOCK-END + * BLOCK-ENTRY + * BLOCK-MAPPING-START + * KEY + * SCALAR("key 1",plain) + * VALUE + * SCALAR("value 1",plain) + * KEY + * SCALAR("key 2",plain) + * VALUE + * SCALAR("value 2",plain) + * BLOCK-END + * BLOCK-ENTRY + * BLOCK-MAPPING-START + * KEY + * SCALAR("complex key") + * VALUE + * SCALAR("complex value") + * BLOCK-END + * BLOCK-END + * STREAM-END + * + * 2. Collections in a mapping: + * + * ? a sequence + * : - item 1 + * - item 2 + * ? a mapping + * : key 1: value 1 + * key 2: value 2 + * + * Tokens: + * + * STREAM-START(utf-8) + * BLOCK-MAPPING-START + * KEY + * SCALAR("a sequence",plain) + * VALUE + * BLOCK-SEQUENCE-START + * BLOCK-ENTRY + * SCALAR("item 1",plain) + * BLOCK-ENTRY + * SCALAR("item 2",plain) + * BLOCK-END + * KEY + * SCALAR("a mapping",plain) + * VALUE + * BLOCK-MAPPING-START + * KEY + * SCALAR("key 1",plain) + * VALUE + * SCALAR("value 1",plain) + * KEY + * SCALAR("key 2",plain) + * VALUE + * SCALAR("value 2",plain) + * BLOCK-END + * BLOCK-END + * STREAM-END + * + * YAML also permits non-indented sequences if they are included into a block + * mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: + * + * key: + * - item 1 # BLOCK-SEQUENCE-START is NOT produced here. 
+ * - item 2 + * + * Tokens: + * + * STREAM-START(utf-8) + * BLOCK-MAPPING-START + * KEY + * SCALAR("key",plain) + * VALUE + * BLOCK-ENTRY + * SCALAR("item 1",plain) + * BLOCK-ENTRY + * SCALAR("item 2",plain) + * BLOCK-END + */ + +/* + * Ensure that the buffer contains the required number of characters. + * Return 1 on success, 0 on failure (reader error or memory error). + */ +func cache(parser *yaml_parser_t, length int) bool { + if parser.unread >= length { + return true + } + + return yaml_parser_update_buffer(parser, length) +} + +/* + * Advance the buffer pointer. + */ +func skip(parser *yaml_parser_t) { + parser.mark.index++ + parser.mark.column++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) +} + +func skip_line(parser *yaml_parser_t) { + if is_crlf_at(parser.buffer, parser.buffer_pos) { + parser.mark.index += 2 + parser.mark.column = 0 + parser.mark.line++ + parser.unread -= 2 + parser.buffer_pos += 2 + } else if is_break_at(parser.buffer, parser.buffer_pos) { + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) + } +} + +/* + * Copy a character to a string buffer and advance pointers. + */ + +func read(parser *yaml_parser_t, s []byte) []byte { + w := width(parser.buffer[parser.buffer_pos]) + if w == 0 { + panic("invalid character sequence") + } + if len(s) == 0 { + s = make([]byte, 0, 32) + } + if w == 1 && len(s)+w <= cap(s) { + s = s[:len(s)+1] + s[len(s)-1] = parser.buffer[parser.buffer_pos] + parser.buffer_pos++ + } else { + s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) + parser.buffer_pos += w + } + parser.mark.index++ + parser.mark.column++ + parser.unread-- + return s +} + +/* + * Copy a line break character to a string buffer and advance pointers. 
+ */ +func read_line(parser *yaml_parser_t, s []byte) []byte { + buf := parser.buffer + pos := parser.buffer_pos + if buf[pos] == '\r' && buf[pos+1] == '\n' { + /* CR LF . LF */ + s = append(s, '\n') + parser.buffer_pos += 2 + parser.mark.index++ + parser.unread-- + } else if buf[pos] == '\r' || buf[pos] == '\n' { + /* CR|LF . LF */ + s = append(s, '\n') + parser.buffer_pos += 1 + } else if buf[pos] == '\xC2' && buf[pos+1] == '\x85' { + /* NEL . LF */ + s = append(s, '\n') + parser.buffer_pos += 2 + } else if buf[pos] == '\xE2' && buf[pos+1] == '\x80' && + (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9') { + // LS|PS . LS|PS + s = append(s, buf[parser.buffer_pos:pos+3]...) + parser.buffer_pos += 3 + } else { + return s + } + + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + return s +} + +/* + * Get the next token. + */ + +func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { + /* Erase the token object. */ + *token = yaml_token_t{} + + /* No tokens after STREAM-END or error. */ + + if parser.stream_end_produced || parser.error != yaml_NO_ERROR { + return true + } + + /* Ensure that the tokens queue contains enough tokens. */ + + if !parser.token_available { + if !yaml_parser_fetch_more_tokens(parser) { + return false + } + } + + /* Fetch the next token from the queue. */ + + *token = parser.tokens[parser.tokens_head] + parser.tokens_head++ + parser.token_available = false + parser.tokens_parsed++ + + if token.token_type == yaml_STREAM_END_TOKEN { + parser.stream_end_produced = true + } + + return true +} + +/* + * Set the scanner error and return 0. 
+ */ + +func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, + context_mark YAML_mark_t, problem string) bool { + parser.error = yaml_SCANNER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = parser.mark + + return false +} + +func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark YAML_mark_t, problem string) bool { + context := "while parsing a %TAG directive" + if directive { + context = "while parsing a tag" + } + return yaml_parser_set_scanner_error(parser, context, context_mark, "did not find URI escaped octet") +} + +/* + * Ensure that the tokens queue contains at least one token which can be + * returned to the Parser. + */ + +func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { + /* While we need more tokens to fetch, do it. */ + + for { + /* + * Check if we really need to fetch more tokens. + */ + + need_more_tokens := false + + if parser.tokens_head == len(parser.tokens) { + /* Queue is empty. */ + + need_more_tokens = true + } else { + + /* Check if any potential simple key may occupy the head position. */ + + if !yaml_parser_stale_simple_keys(parser) { + return false + } + + for i := range parser.simple_keys { + simple_key := &parser.simple_keys[i] + + if simple_key.possible && + simple_key.token_number == parser.tokens_parsed { + need_more_tokens = true + break + } + } + } + if len(parser.simple_keys) > 0 { + + } + /* We are finished. */ + + if !need_more_tokens { + break + } + + /* Fetch the next token. */ + + if !yaml_parser_fetch_next_token(parser) { + return false + } + + } + + parser.token_available = true + + return true +} + +/* + * The dispatcher for token fetchers. + */ + +func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { + /* Ensure that the buffer is initialized. */ + + if !cache(parser, 1) { + return false + } + + /* Check if we just started scanning. Fetch STREAM-START then. 
*/ + + if !parser.stream_start_produced { + return yaml_parser_fetch_stream_start(parser) + } + + /* Eat whitespaces and comments until we reach the next token. */ + + if !yaml_parser_scan_to_next_token(parser) { + return false + } + + /* Remove obsolete potential simple keys. */ + + if !yaml_parser_stale_simple_keys(parser) { + return false + } + + /* Check the indentation level against the current column. */ + + if !yaml_parser_unroll_indent(parser, parser.mark.column) { + return false + } + + /* + * Ensure that the buffer contains at least 4 characters. 4 is the length + * of the longest indicators ('--- ' and '... '). + */ + + if !cache(parser, 4) { + return false + } + + /* Is it the end of the stream? */ + buf := parser.buffer + pos := parser.buffer_pos + + if is_z(buf[pos]) { + return yaml_parser_fetch_stream_end(parser) + } + + /* Is it a directive? */ + + if parser.mark.column == 0 && buf[pos] == '%' { + return yaml_parser_fetch_directive(parser) + } + + /* Is it the document start indicator? */ + + if parser.mark.column == 0 && + buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && + is_blankz_at(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, + yaml_DOCUMENT_START_TOKEN) + } + + /* Is it the document end indicator? */ + + if parser.mark.column == 0 && + buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && + is_blankz_at(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, + yaml_DOCUMENT_END_TOKEN) + } + + /* Is it the flow sequence start indicator? */ + + if buf[pos] == '[' { + return yaml_parser_fetch_flow_collection_start(parser, + yaml_FLOW_SEQUENCE_START_TOKEN) + } + + /* Is it the flow mapping start indicator? */ + + if buf[pos] == '{' { + return yaml_parser_fetch_flow_collection_start(parser, + yaml_FLOW_MAPPING_START_TOKEN) + } + + /* Is it the flow sequence end indicator? 
*/ + + if buf[pos] == ']' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_SEQUENCE_END_TOKEN) + } + + /* Is it the flow mapping end indicator? */ + + if buf[pos] == '}' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_MAPPING_END_TOKEN) + } + + /* Is it the flow entry indicator? */ + + if buf[pos] == ',' { + return yaml_parser_fetch_flow_entry(parser) + } + + /* Is it the block entry indicator? */ + if buf[pos] == '-' && is_blankz_at(buf, pos+1) { + return yaml_parser_fetch_block_entry(parser) + } + + /* Is it the key indicator? */ + if buf[pos] == '?' && + (parser.flow_level > 0 || is_blankz_at(buf, pos+1)) { + return yaml_parser_fetch_key(parser) + } + + /* Is it the value indicator? */ + if buf[pos] == ':' && + (parser.flow_level > 0 || is_blankz_at(buf, pos+1)) { + return yaml_parser_fetch_value(parser) + } + + /* Is it an alias? */ + if buf[pos] == '*' { + return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) + } + + /* Is it an anchor? */ + + if buf[pos] == '&' { + return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) + } + + /* Is it a tag? */ + + if buf[pos] == '!' { + return yaml_parser_fetch_tag(parser) + } + + /* Is it a literal scalar? */ + if buf[pos] == '|' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, true) + } + + /* Is it a folded scalar? */ + if buf[pos] == '>' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, false) + } + + /* Is it a single-quoted scalar? */ + + if buf[pos] == '\'' { + return yaml_parser_fetch_flow_scalar(parser, true) + } + + /* Is it a double-quoted scalar? */ + if buf[pos] == '"' { + return yaml_parser_fetch_flow_scalar(parser, false) + } + + /* + * Is it a plain scalar? + * + * A plain scalar may start with any non-blank characters except + * + * '-', '?', ':', ',', '[', ']', '{', '}', + * '#', '&', '*', '!', '|', '>', '\'', '\"', + * '%', '@', '`'. 
+ * + * In the block context (and, for the '-' indicator, in the flow context + * too), it may also start with the characters + * + * '-', '?', ':' + * + * if it is followed by a non-space character. + * + * The last rule is more restrictive than the specification requires. + */ + + b := buf[pos] + if !(is_blankz_at(buf, pos) || b == '-' || + b == '?' || b == ':' || + b == ',' || b == '[' || + b == ']' || b == '{' || + b == '}' || b == '#' || + b == '&' || b == '*' || + b == '!' || b == '|' || + b == '>' || b == '\'' || + b == '"' || b == '%' || + b == '@' || b == '`') || + (b == '-' && !is_blank(buf[pos+1])) || + (parser.flow_level == 0 && + (buf[pos] == '?' || buf[pos+1] == ':') && + !is_blank(buf[pos+1])) { + return yaml_parser_fetch_plain_scalar(parser) + } + + /* + * If we don't determine the token type so far, it is an error. + */ + + return yaml_parser_set_scanner_error(parser, + "while scanning for the next token", parser.mark, + "found character that cannot start any token") +} + +/* + * Check the list of potential simple keys and remove the positions that + * cannot contain simple keys anymore. + */ + +func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { + /* Check for a potential simple key for each flow level. */ + + for i := range parser.simple_keys { + /* + * The specification requires that a simple key + * + * - is limited to a single line, + * - is shorter than 1024 characters. + */ + + simple_key := &parser.simple_keys[i] + if simple_key.possible && + (simple_key.mark.line < parser.mark.line || + simple_key.mark.index+1024 < parser.mark.index) { + + /* Check if the potential simple key to be removed is required. */ + + if simple_key.required { + return yaml_parser_set_scanner_error(parser, + "while scanning a simple key", simple_key.mark, + "could not find expected ':'") + } + + simple_key.possible = false + } + } + + return true +} + +/* + * Check if a simple key may start at the current position and add it if + * needed. 
+ */ + +func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { + /* + * A simple key is required at the current position if the scanner is in + * the block context and the current column coincides with the indentation + * level. + */ + + required := (parser.flow_level == 0 && + parser.indent == parser.mark.column) + + /* + * A simple key is required only when it is the first token in the current + * line. Therefore it is always allowed. But we add a check anyway. + */ + if required && !parser.simple_key_allowed { + panic("impossible") /* Impossible. */ + } + + /* + * If the current position may start a simple key, save it. + */ + + if parser.simple_key_allowed { + simple_key := yaml_simple_key_t{ + possible: true, + required: required, + token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), + } + simple_key.mark = parser.mark + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_keys[len(parser.simple_keys)-1] = simple_key + } + + return true +} + +/* + * Remove a potential simple key at the current flow level. + */ + +func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { + simple_key := &parser.simple_keys[len(parser.simple_keys)-1] + + if simple_key.possible { + /* If the key is required, it is an error. */ + + if simple_key.required { + return yaml_parser_set_scanner_error(parser, + "while scanning a simple key", simple_key.mark, + "could not find expected ':'") + } + } + + /* Remove the key from the stack. */ + + simple_key.possible = false + + return true +} + +/* + * Increase the flow level and resize the simple key list if needed. + */ + +func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { + /* Reset the simple key on the next level. */ + + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + /* Increase the flow level. */ + + parser.flow_level++ + + return true +} + +/* + * Decrease the flow level. 
+ */ + +func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { + if parser.flow_level > 0 { + parser.flow_level-- + parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] + } + + return true +} + +/* + * Push the current indentation level to the stack and set the new level + * the current column is greater than the indentation level. In this case, + * append or insert the specified token into the token queue. + * + */ + +func yaml_parser_roll_indent(parser *yaml_parser_t, column int, + number int, token_type yaml_token_type_t, mark YAML_mark_t) bool { + /* In the flow context, do nothing. */ + + if parser.flow_level > 0 { + return true + } + + if parser.indent == -1 || parser.indent < column { + /* + * Push the current indentation level to the stack and set the new + * indentation level. + */ + + parser.indents = append(parser.indents, parser.indent) + parser.indent = column + + /* Create a token and insert it into the queue. */ + token := yaml_token_t{ + token_type: token_type, + start_mark: mark, + end_mark: mark, + } + + // number == -1 -> enqueue otherwise insert + if number > -1 { + number -= parser.tokens_parsed + } + insert_token(parser, number, &token) + } + + return true +} + +/* + * Pop indentation levels from the indents stack until the current level + * becomes less or equal to the column. For each indentation level, append + * the BLOCK-END token. + */ + +func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { + /* In the flow context, do nothing. */ + + if parser.flow_level > 0 { + return true + } + + /* + * column is unsigned and parser->indent is signed, so if + * parser->indent is less than zero the conditional in the while + * loop below is incorrect. Guard against that. + */ + + if parser.indent < 0 { + return true + } + + /* Loop through the indentation levels in the stack. */ + + for parser.indent > column { + /* Create a token and append it to the queue. 
*/ + token := yaml_token_t{ + token_type: yaml_BLOCK_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + insert_token(parser, -1, &token) + + /* Pop the indentation level. */ + parser.indent = parser.indents[len(parser.indents)-1] + parser.indents = parser.indents[:len(parser.indents)-1] + + } + + return true +} + +/* + * Pop indentation levels from the indents stack until the current + * level resets to -1. For each indentation level, append the + * BLOCK-END token. + */ + +func yaml_parser_reset_indent(parser *yaml_parser_t) bool { + /* In the flow context, do nothing. */ + + if parser.flow_level > 0 { + return true + } + + /* Loop through the indentation levels in the stack. */ + + for parser.indent > -1 { + /* Create a token and append it to the queue. */ + + token := yaml_token_t{ + token_type: yaml_BLOCK_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + insert_token(parser, -1, &token) + + /* Pop the indentation level. */ + parser.indent = parser.indents[len(parser.indents)-1] + parser.indents = parser.indents[:len(parser.indents)-1] + } + + return true +} + +/* + * Initialize the scanner and produce the STREAM-START token. + */ + +func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { + /* Set the initial indentation. */ + + parser.indent = -1 + + /* Initialize the simple key stack. */ + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + /* A simple key is allowed at the beginning of the stream. */ + + parser.simple_key_allowed = true + + /* We have started. */ + + parser.stream_start_produced = true + + /* Create the STREAM-START token and append it to the queue. */ + token := yaml_token_t{ + token_type: yaml_STREAM_START_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + encoding: parser.encoding, + } + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the STREAM-END token and shut down the scanner. 
+ */ + +func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { + /* Force new line. */ + + if parser.mark.column != 0 { + parser.mark.column = 0 + parser.mark.line++ + } + + /* Reset the indentation level. */ + + if !yaml_parser_reset_indent(parser) { + return false + } + + /* Reset simple keys. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + /* Create the STREAM-END token and append it to the queue. */ + token := yaml_token_t{ + token_type: yaml_STREAM_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. + */ + +func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { + /* Reset the indentation level. */ + + if !yaml_parser_reset_indent(parser) { + return false + } + + /* Reset simple keys. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + /* Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. */ + var token yaml_token_t + if !yaml_parser_scan_directive(parser, &token) { + return false + } + + /* Append the token to the queue. */ + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the DOCUMENT-START or DOCUMENT-END token. + */ + +func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, + token_type yaml_token_type_t) bool { + + /* Reset the indentation level. */ + + if !yaml_parser_reset_indent(parser) { + return false + } + + /* Reset simple keys. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + /* Consume the token. */ + + start_mark := parser.mark + + skip(parser) + skip(parser) + skip(parser) + + end_mark := parser.mark + + /* Create the DOCUMENT-START or DOCUMENT-END token. 
 */
+
+	token := yaml_token_t{
+		token_type: token_type,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+
+	/* Append the token to the queue. */
+
+	insert_token(parser, -1, &token)
+
+	return true
+}
+
+/*
+ * Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+ */
+
+func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t,
+	token_type yaml_token_type_t) bool {
+
+	/* The indicators '[' and '{' may start a simple key. */
+
+	if !yaml_parser_save_simple_key(parser) {
+		return false
+	}
+
+	/* Increase the flow level. */
+
+	if !yaml_parser_increase_flow_level(parser) {
+		return false
+	}
+
+	/* A simple key may follow the indicators '[' and '{'. */
+
+	parser.simple_key_allowed = true
+
+	/* Consume the token. */
+
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	/* Create the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. */
+
+	token := yaml_token_t{
+		token_type: token_type,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+
+	/* Append the token to the queue. */
+
+	insert_token(parser, -1, &token)
+
+	return true
+}
+
+/*
+ * Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+ */
+
+func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t,
+	token_type yaml_token_type_t) bool {
+
+	/* Reset any potential simple key on the current flow level. */
+
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	/* Decrease the flow level. */
+
+	if !yaml_parser_decrease_flow_level(parser) {
+		return false
+	}
+
+	/* No simple keys after the indicators ']' and '}'. */
+
+	parser.simple_key_allowed = false
+
+	/* Consume the token. */
+
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	/* Create the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. */
+
+	token := yaml_token_t{
+		token_type: token_type,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+
+	/* Append the token to the queue.
*/ + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the FLOW-ENTRY token. + */ + +func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { + + /* Reset any potential simple keys on the current flow level. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + /* Simple keys are allowed after ','. */ + + parser.simple_key_allowed = true + + /* Consume the token. */ + + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + /* Create the FLOW-ENTRY token and append it to the queue. */ + + token := yaml_token_t{ + token_type: yaml_FLOW_ENTRY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the BLOCK-ENTRY token. + */ + +func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { + + /* Check if the scanner is in the block context. */ + + if parser.flow_level == 0 { + /* Check if we are allowed to start a new entry. */ + + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "block sequence entries are not allowed in this context") + } + + /* Add the BLOCK-SEQUENCE-START token if needed. */ + + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, + yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { + return false + } + } else { + /* + * It is an error for the '-' indicator to occur in the flow context, + * but we let the Parser detect and report about it because the Parser + * is able to point to the context. + */ + } + + /* Reset any potential simple keys on the current flow level. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + /* Simple keys are allowed after '-'. */ + + parser.simple_key_allowed = true + + /* Consume the token. */ + + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + /* Create the BLOCK-ENTRY token and append it to the queue. 
 */
+
+	token := yaml_token_t{
+		token_type: yaml_BLOCK_ENTRY_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+
+	insert_token(parser, -1, &token)
+
+	return true
+}
+
+/*
+ * Produce the KEY token.
+ */
+
+func yaml_parser_fetch_key(parser *yaml_parser_t) bool {
+	/* In the block context, additional checks are required. */
+
+	if parser.flow_level == 0 {
+		/* Check if we are allowed to start a new key (not necessarily simple). */
+
+		if !parser.simple_key_allowed {
+			return yaml_parser_set_scanner_error(parser, "", parser.mark,
+				"mapping keys are not allowed in this context")
+		}
+
+		/* Add the BLOCK-MAPPING-START token if needed. */
+
+		if !yaml_parser_roll_indent(parser, parser.mark.column, -1,
+			yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
+			return false
+		}
+	}
+
+	/* Reset any potential simple keys on the current flow level. */
+
+	if !yaml_parser_remove_simple_key(parser) {
+		return false
+	}
+
+	/* Simple keys are allowed after '?' in the block context. */
+
+	parser.simple_key_allowed = (parser.flow_level == 0)
+
+	/* Consume the token. */
+
+	start_mark := parser.mark
+	skip(parser)
+	end_mark := parser.mark
+
+	/* Create the KEY token and append it to the queue. */
+
+	token := yaml_token_t{
+		token_type: yaml_KEY_TOKEN,
+		start_mark: start_mark,
+		end_mark:   end_mark,
+	}
+
+	insert_token(parser, -1, &token)
+
+	return true
+}
+
+/*
+ * Produce the VALUE token.
+ */
+
+func yaml_parser_fetch_value(parser *yaml_parser_t) bool {
+
+	simple_key := &parser.simple_keys[len(parser.simple_keys)-1]
+
+	/* Have we found a simple key? */
+
+	if simple_key.possible {
+
+		/* Create the KEY token and insert it into the queue. */
+
+		token := yaml_token_t{
+			token_type: yaml_KEY_TOKEN,
+			start_mark: simple_key.mark,
+			end_mark:   simple_key.mark,
+		}
+
+		insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)
+
+		/* In the block context, we may need to add the BLOCK-MAPPING-START token.
*/ + + if !yaml_parser_roll_indent(parser, simple_key.mark.column, + simple_key.token_number, + yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { + return false + } + + /* Remove the simple key. */ + + simple_key.possible = false + + /* A simple key cannot follow another simple key. */ + + parser.simple_key_allowed = false + } else { + /* The ':' indicator follows a complex key. */ + + /* In the block context, extra checks are required. */ + + if parser.flow_level == 0 { + /* Check if we are allowed to start a complex value. */ + + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping values are not allowed in this context") + } + + /* Add the BLOCK-MAPPING-START token if needed. */ + + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, + yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + /* Simple keys after ':' are allowed in the block context. */ + + parser.simple_key_allowed = (parser.flow_level == 0) + } + + /* Consume the token. */ + + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + /* Create the VALUE token and append it to the queue. */ + + token := yaml_token_t{ + token_type: yaml_VALUE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the ALIAS or ANCHOR token. + */ + +func yaml_parser_fetch_anchor(parser *yaml_parser_t, token_type yaml_token_type_t) bool { + + /* An anchor or an alias could be a simple key. */ + + if !yaml_parser_save_simple_key(parser) { + return false + } + + /* A simple key cannot follow an anchor or an alias. */ + + parser.simple_key_allowed = false + + /* Create the ALIAS or ANCHOR token and append it to the queue. */ + var token yaml_token_t + if !yaml_parser_scan_anchor(parser, &token, token_type) { + return false + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the TAG token. 
+ */ + +func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { + /* A tag could be a simple key. */ + + if !yaml_parser_save_simple_key(parser) { + return false + } + + /* A simple key cannot follow a tag. */ + + parser.simple_key_allowed = false + + /* Create the TAG token and append it to the queue. */ + var token yaml_token_t + if !yaml_parser_scan_tag(parser, &token) { + return false + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. + */ + +func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { + /* Remove any potential simple keys. */ + + if !yaml_parser_remove_simple_key(parser) { + return false + } + + /* A simple key may follow a block scalar. */ + + parser.simple_key_allowed = true + + /* Create the SCALAR token and append it to the queue. */ + var token yaml_token_t + if !yaml_parser_scan_block_scalar(parser, &token, literal) { + return false + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. + */ + +func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { + + /* A plain scalar could be a simple key. */ + + if !yaml_parser_save_simple_key(parser) { + return false + } + + /* A simple key cannot follow a flow scalar. */ + + parser.simple_key_allowed = false + + /* Create the SCALAR token and append it to the queue. */ + var token yaml_token_t + if !yaml_parser_scan_flow_scalar(parser, &token, single) { + return false + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Produce the SCALAR(...,plain) token. + */ + +func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { + /* A plain scalar could be a simple key. */ + + if !yaml_parser_save_simple_key(parser) { + return false + } + + /* A simple key cannot follow a flow scalar. 
*/ + + parser.simple_key_allowed = false + + /* Create the SCALAR token and append it to the queue. */ + var token yaml_token_t + if !yaml_parser_scan_plain_scalar(parser, &token) { + return false + } + + insert_token(parser, -1, &token) + + return true +} + +/* + * Eat whitespaces and comments until the next token is found. + */ + +func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { + /* Until the next token is not found. */ + + for { + /* Allow the BOM mark to start a line. */ + + if !cache(parser, 1) { + return false + } + + if parser.mark.column == 0 && is_bom_at(parser.buffer, parser.buffer_pos) { + skip(parser) + } + + /* + * Eat whitespaces. + * + * Tabs are allowed: + * + * - in the flow context; + * - in the block context, but not at the beginning of the line or + * after '-', '?', or ':' (complex value). + */ + + if !cache(parser, 1) { + return false + } + + for parser.buffer[parser.buffer_pos] == ' ' || + ((parser.flow_level > 0 || !parser.simple_key_allowed) && + parser.buffer[parser.buffer_pos] == '\t') { + skip(parser) + if !cache(parser, 1) { + return false + } + } + + /* Eat a comment until a line break. */ + + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz_at(parser.buffer, parser.buffer_pos) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + } + + /* If it is a line break, eat it. */ + + if is_break_at(parser.buffer, parser.buffer_pos) { + if !cache(parser, 2) { + return false + } + skip_line(parser) + + /* In the block context, a new line may start a simple key. */ + + if parser.flow_level == 0 { + parser.simple_key_allowed = true + } + } else { + /* We have found a token. */ + + break + } + } + + return true +} + +/* + * Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. + * + * Scope: + * %YAML 1.1 # a comment \n + * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + * %TAG !yaml! 
tag:yaml.org,2002:  \n
+ *      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ */
+
+func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
+	/* Eat '%'. */
+
+	start_mark := parser.mark
+
+	skip(parser)
+
+	/* Scan the directive name. */
+	var name []byte
+	if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
+		return false
+	}
+
+	/* Is it a YAML directive? */
+	var major, minor int
+	if bytes.Equal(name, []byte("YAML")) {
+		/* Scan the VERSION directive value. */
+
+		if !yaml_parser_scan_version_directive_value(parser, start_mark,
+			&major, &minor) {
+			return false
+		}
+
+		end_mark := parser.mark
+
+		/* Create a VERSION-DIRECTIVE token. */
+
+		*token = yaml_token_t{
+			token_type: yaml_VERSION_DIRECTIVE_TOKEN,
+			start_mark: start_mark,
+			end_mark:   end_mark,
+			major:      major,
+			minor:      minor,
+		}
+	} else if bytes.Equal(name, []byte("TAG")) {
+		/* Is it a TAG directive? */
+		/* Scan the TAG directive value. */
+		var handle, prefix []byte
+		if !yaml_parser_scan_tag_directive_value(parser, start_mark,
+			&handle, &prefix) {
+			return false
+		}
+
+		end_mark := parser.mark
+
+		/* Create a TAG-DIRECTIVE token. */
+
+		*token = yaml_token_t{
+			token_type: yaml_TAG_DIRECTIVE_TOKEN,
+			start_mark: start_mark,
+			end_mark:   end_mark,
+			value:      handle,
+			prefix:     prefix,
+		}
+	} else {
+		/* Unknown directive. */
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "found unknown directive name")
+		return false
+	}
+
+	/* Eat the rest of the line including any comments. */
+
+	if !cache(parser, 1) {
+		return false
+	}
+
+	for is_blank(parser.buffer[parser.buffer_pos]) {
+		skip(parser)
+		if !cache(parser, 1) {
+			return false
+		}
+	}
+
+	if parser.buffer[parser.buffer_pos] == '#' {
+		for !is_breakz_at(parser.buffer, parser.buffer_pos) {
+			skip(parser)
+			if !cache(parser, 1) {
+				return false
+			}
+		}
+	}
+
+	/* Check if we are at the end of the line.
 */
+
+	if !is_breakz_at(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "did not find expected comment or line break")
+		return false
+	}
+
+	/* Eat a line break. */
+
+	if is_break_at(parser.buffer, parser.buffer_pos) {
+		if !cache(parser, 2) {
+			return false
+		}
+		skip_line(parser)
+	}
+
+	return true
+}
+
+/*
+ * Scan the directive name.
+ *
+ * Scope:
+ *      %YAML   1.1     # a comment \n
+ *       ^^^^
+ *      %TAG    !yaml!  tag:yaml.org,2002:  \n
+ *       ^^^
+ */
+
+func yaml_parser_scan_directive_name(parser *yaml_parser_t,
+	start_mark YAML_mark_t, name *[]byte) bool {
+
+	/* Consume the directive name. */
+
+	if !cache(parser, 1) {
+		return false
+	}
+
+	var s []byte
+	for is_alpha(parser.buffer[parser.buffer_pos]) {
+		s = read(parser, s)
+		if !cache(parser, 1) {
+			return false
+		}
+	}
+
+	/* Check if the name is empty. */
+
+	if len(s) == 0 {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "could not find expected directive name")
+		return false
+	}
+
+	/* Check for a blank character after the name. */
+
+	if !is_blankz_at(parser.buffer, parser.buffer_pos) {
+		yaml_parser_set_scanner_error(parser, "while scanning a directive",
+			start_mark, "found unexpected non-alphabetical character")
+		return false
+	}
+
+	*name = s
+
+	return true
+}
+
+/*
+ * Scan the value of VERSION-DIRECTIVE.
+ *
+ * Scope:
+ *      %YAML   1.1     # a comment \n
+ *           ^^^^^^
+ */
+
+func yaml_parser_scan_version_directive_value(parser *yaml_parser_t,
+	start_mark YAML_mark_t, major *int, minor *int) bool {
+	/* Eat whitespaces. */
+
+	if !cache(parser, 1) {
+		return false
+	}
+
+	for is_blank(parser.buffer[parser.buffer_pos]) {
+		skip(parser)
+		if !cache(parser, 1) {
+			return false
+		}
+	}
+
+	/* Consume the major version number. */
+
+	if !yaml_parser_scan_version_directive_number(parser, start_mark, major) {
+		return false
+	}
+
+	/* Eat '.'. */
+
+	if parser.buffer[parser.buffer_pos] != '.'
{ + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected digit or '.' character") + } + + skip(parser) + + /* Consume the minor version number. */ + + if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { + return false + } + + return true +} + +const MAX_NUMBER_LENGTH = 9 + +/* + * Scan the version number of VERSION-DIRECTIVE. + * + * Scope: + * %YAML 1.1 # a comment \n + * ^ + * %YAML 1.1 # a comment \n + * ^ + */ + +func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, + start_mark YAML_mark_t, number *int) bool { + + /* Repeat while the next character is digit. */ + + if !cache(parser, 1) { + return false + } + + value := 0 + length := 0 + for is_digit(parser.buffer[parser.buffer_pos]) { + /* Check if the number is too long. */ + + length++ + if length > MAX_NUMBER_LENGTH { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "found extremely long version number") + } + + value = value*10 + as_digit(parser.buffer[parser.buffer_pos]) + + skip(parser) + + if !cache(parser, 1) { + return false + } + } + + /* Check if the number was present. */ + + if length == 0 { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected version number") + } + + *number = value + + return true +} + +/* + * Scan the value of a TAG-DIRECTIVE token. + * + * Scope: + * %TAG !yaml! tag:yaml.org,2002: \n + * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + */ + +func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, + start_mark YAML_mark_t, handle, prefix *[]byte) bool { + + /* Eat whitespaces. */ + + if !cache(parser, 1) { + return false + } + + for is_blank(parser.buffer[parser.buffer_pos]) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + + /* Scan a handle. 
*/ + var handle_value []byte + if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { + return false + } + + /* Expect a whitespace. */ + + if !cache(parser, 1) { + return false + } + + if !is_blank(parser.buffer[parser.buffer_pos]) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace") + return false + } + + /* Eat whitespaces. */ + + for is_blank(parser.buffer[parser.buffer_pos]) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + + /* Scan a prefix. */ + var prefix_value []byte + if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { + return false + } + + /* Expect a whitespace or line break. */ + + if !cache(parser, 1) { + return false + } + + if !is_blankz_at(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace or line break") + return false + } + + *handle = handle_value + *prefix = prefix_value + + return true +} + +func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, + token_type yaml_token_type_t) bool { + + /* Eat the indicator character. */ + + start_mark := parser.mark + + skip(parser) + + /* Consume the value. */ + + if !cache(parser, 1) { + return false + } + + var s []byte + for is_alpha(parser.buffer[parser.buffer_pos]) { + s = read(parser, s) + if !cache(parser, 1) { + return false + } + } + + end_mark := parser.mark + + /* + * Check if length of the anchor is greater than 0 and it is followed by + * a whitespace character or one of the indicators: + * + * '?', ':', ',', ']', '}', '%', '@', '`'. + */ + + b := parser.buffer[parser.buffer_pos] + if len(s) == 0 || !(is_blankz_at(parser.buffer, parser.buffer_pos) || b == '?' 
|| + b == ':' || b == ',' || + b == ']' || b == '}' || + b == '%' || b == '@' || + b == '`') { + context := "while scanning an anchor" + if token_type != yaml_ANCHOR_TOKEN { + context = "while scanning an alias" + } + yaml_parser_set_scanner_error(parser, context, start_mark, + "did not find expected alphabetic or numeric character") + return false + } + + /* Create a token. */ + *token = yaml_token_t{ + token_type: token_type, + start_mark: start_mark, + end_mark: end_mark, + value: s, + } + + return true +} + +/* + * Scan a TAG token. + */ + +func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { + start_mark := parser.mark + + /* Check if the tag is in the canonical form. */ + + if !cache(parser, 2) { + return false + } + + var handle []byte + var suffix []byte + if parser.buffer[parser.buffer_pos+1] == '<' { + /* Set the handle to '' */ + + /* Eat '!<' */ + + skip(parser) + skip(parser) + + /* Consume the tag value. */ + + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + + /* Check for '>' and eat it. */ + + if parser.buffer[parser.buffer_pos] != '>' { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find the expected '>'") + return false + } + + skip(parser) + } else if is_blank(parser.buffer[parser.buffer_pos+1]) { + // NON-SPECIFIED + skip(parser) + } else { + /* The tag has either the '!suffix' or the '!handle!suffix' form. */ + + /* First, try to scan a handle. */ + + if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { + return false + } + + /* Check if it is, indeed, handle. */ + + if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { + /* Scan the suffix now. */ + + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + } else { + /* It wasn't a handle after all. Scan the rest of the tag. 
*/ + + if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { + return false + } + + /* Set the handle to '!'. */ + + handle = []byte{'!'} + + /* + * A special case: the '!' tag. Set the handle to '' and the + * suffix to '!'. + */ + + if len(suffix) == 0 { + handle, suffix = suffix, handle + } + + } + } + + /* Check the character which ends the tag. */ + + if !cache(parser, 1) { + return false + } + + if !is_blankz_at(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find expected whitespace or line break") + return false + } + + end_mark := parser.mark + + /* Create a token. */ + + *token = yaml_token_t{ + token_type: yaml_TAG_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + suffix: suffix, + } + + return true +} + +/* + * Scan a tag handle. + */ + +func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, + start_mark YAML_mark_t, handle *[]byte) bool { + + /* Check the initial '!' character. */ + + if !cache(parser, 1) { + return false + } + + if parser.buffer[parser.buffer_pos] != '!' { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + + /* Copy the '!' character. */ + var s []byte + s = read(parser, s) + + /* Copy all subsequent alphabetical and numerical characters. */ + + if !cache(parser, 1) { + return false + } + + for is_alpha(parser.buffer[parser.buffer_pos]) { + s = read(parser, s) + if !cache(parser, 1) { + return false + } + } + + /* Check if the trailing character is '!' and copy it. */ + + if parser.buffer[parser.buffer_pos] == '!' { + s = read(parser, s) + } else { + /* + * It's either the '!' tag or not really a tag handle. If it's a %TAG + * directive, it's an error. If it's a tag token, it must be a part of + * URI. + */ + + if directive && !(s[0] == '!' 
&& len(s) == 1) { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + } + + *handle = s + + return true +} + +/* + * Scan a tag. + */ + +func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, + head []byte, start_mark YAML_mark_t, uri *[]byte) bool { + + var s []byte + /* + * Copy the head if needed. + * + * Note that we don't copy the leading '!' character. + */ + if len(head) > 1 { + s = append(s, head[1:]...) + } + + /* Scan the tag. */ + if !cache(parser, 1) { + return false + } + + /* + * The set of characters that may appear in URI is as follows: + * + * '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', + * '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', + * '%'. + */ + + b := parser.buffer[parser.buffer_pos] + for is_alpha(b) || b == ';' || + b == '/' || b == '?' || + b == ':' || b == '@' || + b == '&' || b == '=' || + b == '+' || b == '$' || + b == ',' || b == '.' || + b == '!' || b == '~' || + b == '*' || b == '\'' || + b == '(' || b == ')' || + b == '[' || b == ']' || + b == '%' { + /* Check if it is a URI-escape sequence. */ + + if b == '%' { + if !yaml_parser_scan_uri_escapes(parser, + directive, start_mark, &s) { + return false + } + } else { + s = read(parser, s) + } + + if !cache(parser, 1) { + return false + } + b = parser.buffer[parser.buffer_pos] + } + + /* Check if the tag is non-empty. */ + + if len(s) == 0 { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected tag URI") + return false + } + + *uri = s + + return true +} + +/* + * Decode an URI-escape sequence corresponding to a single UTF-8 character. + */ + +func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, + start_mark YAML_mark_t, s *[]byte) bool { + + /* Decode the required number of characters. */ + w := 10 + for w > 0 { + + /* Check for a URI-escaped octet. 
*/ + + if !cache(parser, 3) { + return false + } + + if !(parser.buffer[parser.buffer_pos] == '%' && + is_hex(parser.buffer[parser.buffer_pos+1]) && + is_hex(parser.buffer[parser.buffer_pos+2])) { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find URI escaped octet") + } + + /* Get the octet. */ + octet := byte((as_hex(parser.buffer[parser.buffer_pos+1]) << 4) + + as_hex(parser.buffer[parser.buffer_pos+2])) + + /* If it is the leading octet, determine the length of the UTF-8 sequence. */ + + if w == 10 { + w = width(octet) + if w == 0 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect leading UTF-8 octet") + } + } else { + /* Check if the trailing octet is correct. */ + + if (octet & 0xC0) != 0x80 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect trailing UTF-8 octet") + } + } + + /* Copy the octet and move the pointers. */ + + *s = append(*s, octet) + skip(parser) + skip(parser) + skip(parser) + w-- + } + + return true +} + +/* + * Scan a block scalar. + */ + +func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, + literal bool) bool { + + /* Eat the indicator '|' or '>'. */ + + start_mark := parser.mark + + skip(parser) + + /* Scan the additional block scalar indicators. */ + + if !cache(parser, 1) { + return false + } + + /* Check for a chomping indicator. */ + chomping := 0 + increment := 0 + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + /* Set the chomping method and eat the indicator. */ + + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + + skip(parser) + + /* Check for an indentation indicator. */ + + if !cache(parser, 1) { + return false + } + + if is_digit(parser.buffer[parser.buffer_pos]) { + /* Check that the indentation is greater than 0. 
*/ + + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + + /* Get the indentation level and eat the indicator. */ + + increment = as_digit(parser.buffer[parser.buffer_pos]) + + skip(parser) + } + } else if is_digit(parser.buffer[parser.buffer_pos]) { + + /* Do the same as above, but in the opposite order. */ + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + + increment = as_digit(parser.buffer[parser.buffer_pos]) + + skip(parser) + + if !cache(parser, 1) { + return false + } + + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + + skip(parser) + } + } + + /* Eat whitespaces and comments to the end of the line. */ + + if !cache(parser, 1) { + return false + } + + for is_blank(parser.buffer[parser.buffer_pos]) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz_at(parser.buffer, parser.buffer_pos) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + } + + /* Check if we are at the end of the line. */ + + if !is_breakz_at(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "did not find expected comment or line break") + return false + } + + /* Eat a line break. */ + + if is_break_at(parser.buffer, parser.buffer_pos) { + if !cache(parser, 2) { + return false + } + + skip_line(parser) + } + + end_mark := parser.mark + + /* Set the indentation level if it was specified. 
*/ + indent := 0 + if increment > 0 { + if parser.indent >= 0 { + indent = parser.indent + increment + } else { + indent = increment + } + } + + /* Scan the leading line breaks and determine the indentation level if needed. */ + var trailing_breaks []byte + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, + start_mark, &end_mark) { + return false + } + + /* Scan the block scalar content. */ + + if !cache(parser, 1) { + return false + } + + var s []byte + var leading_break []byte + leading_blank := false + trailing_blank := false + for parser.mark.column == indent && !is_z(parser.buffer[parser.buffer_pos]) { + + /* + * We are at the beginning of a non-empty line. + */ + + /* Is it a trailing whitespace? */ + + trailing_blank = is_blank(parser.buffer[parser.buffer_pos]) + + /* Check if we need to fold the leading line break. */ + + if !literal && len(leading_break) > 0 && leading_break[0] == '\n' && + !leading_blank && !trailing_blank { + /* Do we need to join the lines by space? */ + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } + leading_break = leading_break[:0] + } else { + s = append(s, leading_break...) + leading_break = leading_break[:0] + } + + /* Append the remaining line breaks. */ + s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + + /* Is it a leading whitespace? */ + + leading_blank = is_blank(parser.buffer[parser.buffer_pos]) + + /* Consume the current line. */ + + for !is_breakz_at(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if !cache(parser, 1) { + return false + } + } + + /* Consume the line break. */ + + if !cache(parser, 2) { + return false + } + + leading_break = read_line(parser, leading_break) + + /* Eat the following indentation spaces and line breaks. */ + + if !yaml_parser_scan_block_scalar_breaks(parser, + &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + } + + /* Chomp the tail. */ + + if chomping != -1 { + s = append(s, leading_break...) 
+ } + if chomping == 1 { + s = append(s, trailing_breaks...) + } + + /* Create a token. */ + + *token = yaml_token_t{ + token_type: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_LITERAL_SCALAR_STYLE, + } + if !literal { + token.style = yaml_FOLDED_SCALAR_STYLE + } + + return true +} + +/* + * Scan indentation spaces and line breaks for a block scalar. Determine the + * indentation level if needed. + */ + +func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, + indent *int, breaks *[]byte, + start_mark YAML_mark_t, end_mark *YAML_mark_t) bool { + + *end_mark = parser.mark + + /* Eat the indentation spaces and line breaks. */ + max_indent := 0 + for { + /* Eat the indentation spaces. */ + + if !cache(parser, 1) { + return false + } + + for (*indent == 0 || parser.mark.column < *indent) && + is_space(parser.buffer[parser.buffer_pos]) { + skip(parser) + if !cache(parser, 1) { + return false + } + } + if parser.mark.column > max_indent { + max_indent = parser.mark.column + } + + /* Check for a tab character messing the indentation. */ + + if (*indent == 0 || parser.mark.column < *indent) && + is_tab(parser.buffer[parser.buffer_pos]) { + return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found a tab character where an indentation space is expected") + } + + /* Have we found a non-empty line? */ + + if !is_break_at(parser.buffer, parser.buffer_pos) { + break + } + + /* Consume the line break. */ + + if !cache(parser, 2) { + return false + } + + *breaks = read_line(parser, *breaks) + *end_mark = parser.mark + } + + /* Determine the indentation level if needed. */ + + if *indent == 0 { + *indent = max_indent + if *indent < parser.indent+1 { + *indent = parser.indent + 1 + } + if *indent < 1 { + *indent = 1 + } + } + + return true +} + +/* + * Scan a quoted scalar. 
+ */ + +func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, + single bool) bool { + + /* Eat the left quote. */ + + start_mark := parser.mark + + skip(parser) + + /* Consume the content of the quoted scalar. */ + var s []byte + var leading_break []byte + var trailing_breaks []byte + var whitespaces []byte + for { + /* Check that there are no document indicators at the beginning of the line. */ + + if !cache(parser, 4) { + return false + } + + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz_at(parser.buffer, parser.buffer_pos+3) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected document indicator") + return false + } + + /* Check for EOF. */ + + if is_z(parser.buffer[parser.buffer_pos]) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected end of stream") + return false + } + + /* Consume non-blank characters. */ + + if !cache(parser, 2) { + return false + } + + leading_blanks := false + + for !is_blankz_at(parser.buffer, parser.buffer_pos) { + /* Check for an escaped single quote. */ + + if single && parser.buffer[parser.buffer_pos] == '\'' && + parser.buffer[parser.buffer_pos+1] == '\'' { + // Is is an escaped single quote. + s = append(s, '\'') + skip(parser) + skip(parser) + } else if single && parser.buffer[parser.buffer_pos] == '\'' { + /* Check for the right quote. */ + break + } else if !single && parser.buffer[parser.buffer_pos] == '"' { + /* Check for the right quote. */ + break + } else if !single && parser.buffer[parser.buffer_pos] == '\\' && + is_break_at(parser.buffer, parser.buffer_pos+1) { + + /* Check for an escaped line break. 
*/ + if !cache(parser, 3) { + return false + } + + skip(parser) + skip_line(parser) + leading_blanks = true + break + } else if !single && parser.buffer[parser.buffer_pos] == '\\' { + + /* Check for an escape sequence. */ + + code_length := 0 + + /* Check the escape character. */ + + switch parser.buffer[parser.buffer_pos+1] { + case '0': + s = append(s, 0) + case 'a': + s = append(s, '\x07') + case 'b': + s = append(s, '\x08') + case 't', '\t': + s = append(s, '\x09') + case 'n': + s = append(s, '\x0A') + case 'v': + s = append(s, '\x0B') + case 'f': + s = append(s, '\x0C') + case 'r': + s = append(s, '\x0D') + case 'e': + s = append(s, '\x1B') + case ' ': + s = append(s, '\x20') + case '"': + s = append(s, '"') + case '/': + s = append(s, '/') + case '\\': + s = append(s, '\\') + case 'N': /* NEL (#x85) */ + s = append(s, '\xC2') + s = append(s, '\x85') + case '_': /* #xA0 */ + s = append(s, '\xC2') + s = append(s, '\xA0') + case 'L': /* LS (#x2028) */ + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA8') + case 'P': /* PS (#x2029) */ + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA9') + case 'x': + code_length = 2 + case 'u': + code_length = 4 + case 'U': + code_length = 8 + default: + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found unknown escape character") + return false + } + + skip(parser) + skip(parser) + + /* Consume an arbitrary escape code. */ + + if code_length > 0 { + value := 0 + + /* Scan the character value. */ + + if !cache(parser, code_length) { + return false + } + + for k := 0; k < code_length; k++ { + if !is_hex(parser.buffer[parser.buffer_pos+k]) { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "did not find expected hexdecimal number") + return false + } + value = (value << 4) + as_hex(parser.buffer[parser.buffer_pos+k]) + } + + /* Check the value and write the character. 
*/ + + if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found invalid Unicode character escape code") + return false + } + + if value <= 0x7F { + s = append(s, byte(value)) + } else if value <= 0x7FF { + s = append(s, byte(0xC0+(value>>6))) + s = append(s, byte(0x80+(value&0x3F))) + } else if value <= 0xFFFF { + s = append(s, byte(0xE0+(value>>12))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } else { + s = append(s, byte(0xF0+(value>>18))) + s = append(s, byte(0x80+((value>>12)&0x3F))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } + + /* Advance the pointer. */ + + for k := 0; k < code_length; k++ { + skip(parser) + } + } + } else { + /* It is a non-escaped non-blank character. */ + + s = read(parser, s) + } + + if !cache(parser, 2) { + return false + } + } + + /* Check if we are at the end of the scalar. */ + b := parser.buffer[parser.buffer_pos] + if single { + if b == '\'' { + break + } + } else if b == '"' { + break + } + + /* Consume blank characters. */ + + if !cache(parser, 1) { + return false + } + + for is_blank(parser.buffer[parser.buffer_pos]) || is_break_at(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer[parser.buffer_pos]) { + /* Consume a space or a tab character. */ + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if !cache(parser, 2) { + return false + } + + /* Check if it is a first line break. */ + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + + if !cache(parser, 1) { + return false + } + } + + /* Join the whitespaces or fold line breaks. */ + + if leading_blanks { + /* Do we need to fold line breaks? 
*/ + + if len(leading_break) > 0 && leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + } + + leading_break = leading_break[:0] + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + leading_break = leading_break[:0] + trailing_breaks = trailing_breaks[:0] + } + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + /* Eat the right quote. */ + + skip(parser) + + end_mark := parser.mark + + /* Create a token. */ + + *token = yaml_token_t{ + token_type: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_SINGLE_QUOTED_SCALAR_STYLE, + } + if !single { + token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + + return true +} + +/* + * Scan a plain scalar. + */ + +func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { + var s []byte + var leading_break []byte + var trailing_breaks []byte + var whitespaces []byte + leading_blanks := false + indent := parser.indent + 1 + + start_mark := parser.mark + end_mark := parser.mark + + /* Consume the content of the plain scalar. */ + + for { + /* Check for a document indicator. */ + + if !cache(parser, 4) { + return false + } + + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz_at(parser.buffer, parser.buffer_pos+3) { + break + } + + /* Check for a comment. */ + + if parser.buffer[parser.buffer_pos] == '#' { + break + } + + /* Consume non-blank characters. */ + + for !is_blankz_at(parser.buffer, parser.buffer_pos) { + /* Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13". 
*/ + + if parser.flow_level > 0 && + parser.buffer[parser.buffer_pos] == ':' && + !is_blankz_at(parser.buffer, parser.buffer_pos+1) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found unexpected ':'") + return false + } + + /* Check for indicators that may end a plain scalar. */ + b := parser.buffer[parser.buffer_pos] + if (b == ':' && is_blankz_at(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level > 0 && + (b == ',' || b == ':' || + b == '?' || b == '[' || + b == ']' || b == '{' || + b == '}')) { + break + } + + /* Check if we need to join whitespaces and breaks. */ + + if leading_blanks || len(whitespaces) > 0 { + if leading_blanks { + /* Do we need to fold line breaks? */ + + if leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + } + leading_break = leading_break[:0] + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + leading_break = leading_break[:0] + trailing_breaks = trailing_breaks[:0] + } + + leading_blanks = false + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + /* Copy the character. */ + + s = read(parser, s) + end_mark = parser.mark + + if !cache(parser, 2) { + return false + } + } + + /* Is it the end? */ + + if !(is_blank(parser.buffer[parser.buffer_pos]) || + is_break_at(parser.buffer, parser.buffer_pos)) { + break + } + + /* Consume blank characters. */ + + if !cache(parser, 1) { + return false + } + + for is_blank(parser.buffer[parser.buffer_pos]) || + is_break_at(parser.buffer, parser.buffer_pos) { + + if is_blank(parser.buffer[parser.buffer_pos]) { + /* Check for tab character that abuse indentation. 
*/ + + if leading_blanks && parser.mark.column < indent && + is_tab(parser.buffer[parser.buffer_pos]) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found a tab character that violate indentation") + return false + } + + /* Consume a space or a tab character. */ + + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if !cache(parser, 2) { + return false + } + + /* Check if it is a first line break. */ + + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if !cache(parser, 1) { + return false + } + } + + /* Check indentation level. */ + + if parser.flow_level == 0 && parser.mark.column < indent { + break + } + } + + /* Create a token. */ + + *token = yaml_token_t{ + token_type: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_PLAIN_SCALAR_STYLE, + } + + /* Note that we change the 'simple_key_allowed' flag. */ + + if leading_blanks { + parser.simple_key_allowed = true + } + + return true +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner_test.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner_test.go new file mode 100644 index 0000000..db31e65 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/scanner_test.go @@ -0,0 +1,80 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "io/ioutil" + "os" + "path/filepath" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var scan = func(filename string) { + It("scan "+filename, func() { + file, err := os.Open(filename) + Expect(err).To(BeNil()) + + parser := yaml_parser_t{} + yaml_parser_initialize(&parser) + yaml_parser_set_input_reader(&parser, file) + + failed := false + token := yaml_token_t{} + + for { + if !yaml_parser_scan(&parser, &token) { + failed = true + break + } + + if token.token_type == yaml_STREAM_END_TOKEN { + break + } + } + + file.Close() + + // msg := "SUCCESS" + // if failed { + // msg = "FAILED" + // if parser.error != yaml_NO_ERROR { + // m := parser.problem_mark + // fmt.Printf("ERROR: (%s) %s @ line: %d col: %d\n", + // parser.context, parser.problem, m.line, m.column) + // } + // } + Expect(failed).To(BeFalse()) + }) +} + +var scanYamls = func(dirname string) { + fileInfos, err := ioutil.ReadDir(dirname) + if err != nil { + panic(err.Error()) + } + + for _, fileInfo := range fileInfos { + if !fileInfo.IsDir() { + scan(filepath.Join(dirname, fileInfo.Name())) + } + } +} + +var _ = Describe("Scanner", func() { + scanYamls("fixtures/specification") + scanYamls("fixtures/specification/types") +}) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/tags.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/tags.go new file mode 100644 index 0000000..f153aee --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/tags.go @@ -0,0 +1,360 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "reflect" + "sort" + "strings" + "sync" + "unicode" +) + +// A field represents a single field found in a struct. +type field struct { + name string + tag bool + index []int + typ reflect.Type + omitEmpty bool + flow bool +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from json tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that JSON should recognize for the given type. +// The algorithm is breadth-first search over the set of structs to include - the top struct +// and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. 
+ current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" { // unexported + continue + } + tag := sf.Tag.Get("yaml") + if tag == "-" { + continue + } + name, opts := parseTag(tag) + if !isValidTag(name) { + name = "" + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := name != "" + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft, + opts.Contains("omitempty"), opts.Contains("flow")}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + next = append(next, field{name: ft.Name(), index: index, typ: ft}) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with JSON tags are promoted. 
+ + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// JSON tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. 
+ if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} + +// tagOptions is the string following a comma in a struct field's "json" +// tag, or the empty string. It does not include the leading comma. +type tagOptions string + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + default: + if !unicode.IsLetter(c) && !unicode.IsDigit(c) { + return false + } + } + } + return true +} + +func fieldByIndex(v reflect.Value, index []int) reflect.Value { + for _, i := range index { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return reflect.Value{} + } + v = v.Elem() + } + v = v.Field(i) + } + return v +} + +func typeByIndex(t reflect.Type, index []int) reflect.Type { + for _, i := range index { + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + t = t.Field(i).Type + } + return t +} + +// stringValues is a slice of reflect.Value holding *reflect.StringValue. +// It implements the methods to sort by string. 
+type stringValues []reflect.Value + +func (sv stringValues) Len() int { return len(sv) } +func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv stringValues) Less(i, j int) bool { + av, ak := getElem(sv[i]) + bv, bk := getElem(sv[j]) + if ak == reflect.String && bk == reflect.String { + return av.String() < bv.String() + } + + return ak < bk +} + +func getElem(v reflect.Value) (reflect.Value, reflect.Kind) { + k := v.Kind() + for k == reflect.Interface || k == reflect.Ptr && !v.IsNil() { + v = v.Elem() + k = v.Kind() + } + + return v, k +} + +// parseTag splits a struct field's json tag into its name and +// comma-separated options. +func parseTag(tag string) (string, tagOptions) { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx], tagOptions(tag[idx+1:]) + } + return tag, tagOptions("") +} + +// Contains reports whether a comma-separated list of options +// contains a particular substr flag. substr must be surrounded by a +// string boundary or commas. +func (o tagOptions) Contains(optionName string) bool { + if len(o) == 0 { + return false + } + s := string(o) + for s != "" { + var next string + i := strings.Index(s, ",") + if i >= 0 { + s, next = s[:i], s[i+1:] + } + if s == optionName { + return true + } + s = next + } + return false +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/writer.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/writer.go new file mode 100644 index 0000000..a76b633 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/writer.go @@ -0,0 +1,128 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +/* + * Set the writer error and return 0. + */ + +func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_WRITER_ERROR + emitter.problem = problem + + return false +} + +/* + * Flush the output buffer. + */ + +func yaml_emitter_flush(emitter *yaml_emitter_t) bool { + if emitter.write_handler == nil { + panic("Write handler must be set") /* Write handler must be set. */ + } + if emitter.encoding == yaml_ANY_ENCODING { + panic("Encoding must be set") /* Output encoding must be set. */ + } + + /* Check if the buffer is empty. */ + + if emitter.buffer_pos == 0 { + return true + } + + /* If the output encoding is UTF-8, we don't need to recode the buffer. */ + + if emitter.encoding == yaml_UTF8_ENCODING { + if err := emitter.write_handler(emitter, + emitter.buffer[:emitter.buffer_pos]); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + emitter.buffer_pos = 0 + return true + } + + /* Recode the buffer into the raw buffer. */ + + var low, high int + if emitter.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + high, low = 1, 0 + } + + pos := 0 + for pos < emitter.buffer_pos { + + /* + * See the "reader.c" code for more details on UTF-8 encoding. Note + * that we assume that the buffer contains a valid UTF-8 sequence. + */ + + /* Read the next UTF-8 character. 
*/ + + octet := emitter.buffer[pos] + + var w int + var value rune + switch { + case octet&0x80 == 0x00: + w, value = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, value = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, value = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, value = 4, rune(octet&0x07) + } + + for k := 1; k < w; k++ { + octet = emitter.buffer[pos+k] + value = (value << 6) + (rune(octet) & 0x3F) + } + + pos += w + + /* Write the character. */ + + if value < 0x10000 { + var b [2]byte + b[high] = byte(value >> 8) + b[low] = byte(value & 0xFF) + emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1]) + } else { + /* Write the character using a surrogate pair (check "reader.c"). */ + + var b [4]byte + value -= 0x10000 + b[high] = byte(0xD8 + (value >> 18)) + b[low] = byte((value >> 10) & 0xFF) + b[high+2] = byte(0xDC + ((value >> 8) & 0xFF)) + b[low+2] = byte(value & 0xFF) + emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3]) + } + } + + /* Write the raw buffer. */ + + // Write the raw buffer. + if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + + emitter.buffer_pos = 0 + emitter.raw_buffer = emitter.raw_buffer[:0] + return true +} diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_definesh.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_definesh.go new file mode 100644 index 0000000..de4c05a --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_definesh.go @@ -0,0 +1,22 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +const ( + yaml_VERSION_MAJOR = 0 + yaml_VERSION_MINOR = 1 + yaml_VERSION_PATCH = 6 + yaml_VERSION_STRING = "0.1.6" +) diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_privateh.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_privateh.go new file mode 100644 index 0000000..2b3b7d7 --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yaml_privateh.go @@ -0,0 +1,891 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +const ( + INPUT_RAW_BUFFER_SIZE = 1024 + + /* + * The size of the input buffer. + * + * It should be possible to decode the whole raw buffer. + */ + INPUT_BUFFER_SIZE = (INPUT_RAW_BUFFER_SIZE * 3) + + /* + * The size of the output buffer. + */ + + OUTPUT_BUFFER_SIZE = 512 + + /* + * The size of the output raw buffer. + * + * It should be possible to encode the whole output buffer. 
+ */ + + OUTPUT_RAW_BUFFER_SIZE = (OUTPUT_BUFFER_SIZE*2 + 2) + + INITIAL_STACK_SIZE = 16 + INITIAL_QUEUE_SIZE = 16 +) + +func width(b byte) int { + if b&0x80 == 0 { + return 1 + } + + if b&0xE0 == 0xC0 { + return 2 + } + + if b&0xF0 == 0xE0 { + return 3 + } + + if b&0xF8 == 0xF0 { + return 4 + } + + return 0 +} + +func copy_bytes(dest []byte, dest_pos *int, src []byte, src_pos *int) { + w := width(src[*src_pos]) + switch w { + case 4: + dest[*dest_pos+3] = src[*src_pos+3] + fallthrough + case 3: + dest[*dest_pos+2] = src[*src_pos+2] + fallthrough + case 2: + dest[*dest_pos+1] = src[*src_pos+1] + fallthrough + case 1: + dest[*dest_pos] = src[*src_pos] + default: + panic("invalid width") + } + *dest_pos += w + *src_pos += w +} + +// /* +// * Check if the character at the specified position is an alphabetical +// * character, a digit, '_', or '-'. +// */ + +func is_alpha(b byte) bool { + return (b >= '0' && b <= '9') || + (b >= 'A' && b <= 'Z') || + (b >= 'a' && b <= 'z') || + b == '_' || b == '-' +} + +// /* +// * Check if the character at the specified position is a digit. +// */ +// +func is_digit(b byte) bool { + return b >= '0' && b <= '9' +} + +// /* +// * Get the value of a digit. +// */ +// +func as_digit(b byte) int { + return int(b) - '0' +} + +// /* +// * Check if the character at the specified position is a hex-digit. +// */ +// +func is_hex(b byte) bool { + return (b >= '0' && b <= '9') || + (b >= 'A' && b <= 'F') || + (b >= 'a' && b <= 'f') +} + +// +// /* +// * Get the value of a hex-digit. +// */ +// +func as_hex(b byte) int { + if b >= 'A' && b <= 'F' { + return int(b) - 'A' + 10 + } else if b >= 'a' && b <= 'f' { + return int(b) - 'a' + 10 + } + return int(b) - '0' +} + +// #define AS_HEX_AT(string,offset) \ +// (((string).pointer[offset] >= (yaml_char_t) 'A' && \ +// (string).pointer[offset] <= (yaml_char_t) 'F') ? 
\ +// ((string).pointer[offset] - (yaml_char_t) 'A' + 10) : \ +// ((string).pointer[offset] >= (yaml_char_t) 'a' && \ +// (string).pointer[offset] <= (yaml_char_t) 'f') ? \ +// ((string).pointer[offset] - (yaml_char_t) 'a' + 10) : \ +// ((string).pointer[offset] - (yaml_char_t) '0')) + +// /* +// * Check if the character is a line break, space, tab, or NUL. +// */ +func is_blankz_at(b []byte, i int) bool { + return is_blank(b[i]) || is_breakz_at(b, i) +} + +// /* +// * Check if the character at the specified position is a line break. +// */ +func is_break_at(b []byte, i int) bool { + return b[i] == '\r' || /* CR (#xD)*/ + b[i] == '\n' || /* LF (#xA) */ + (b[i] == 0xC2 && b[i+1] == 0x85) || /* NEL (#x85) */ + (b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8) || /* LS (#x2028) */ + (b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) /* PS (#x2029) */ +} + +func is_breakz_at(b []byte, i int) bool { + return is_break_at(b, i) || is_z(b[i]) +} + +func is_crlf_at(b []byte, i int) bool { + return b[i] == '\r' && b[i+1] == '\n' +} + +// /* +// * Check if the character at the specified position is NUL. +// */ +func is_z(b byte) bool { + return b == 0x0 +} + +// /* +// * Check if the character at the specified position is space. +// */ +func is_space(b byte) bool { + return b == ' ' +} + +// +// /* +// * Check if the character at the specified position is tab. +// */ +func is_tab(b byte) bool { + return b == '\t' +} + +// /* +// * Check if the character at the specified position is blank (space or tab). +// */ +func is_blank(b byte) bool { + return is_space(b) || is_tab(b) +} + +// /* +// * Check if the character is ASCII. +// */ +func is_ascii(b byte) bool { + return b <= '\x7f' +} + +// /* +// * Check if the character can be printed unescaped. +// */ +func is_printable_at(b []byte, i int) bool { + return ((b[i] == 0x0A) || /* . == #x0A */ + (b[i] >= 0x20 && b[i] <= 0x7E) || /* #x20 <= . <= #x7E */ + (b[i] == 0xC2 && b[i+1] >= 0xA0) || /* #0xA0 <= . 
<= #xD7FF */ + (b[i] > 0xC2 && b[i] < 0xED) || + (b[i] == 0xED && b[i+1] < 0xA0) || + (b[i] == 0xEE) || + (b[i] == 0xEF && /* && . != #xFEFF */ + !(b[i+1] == 0xBB && b[i+2] == 0xBF) && + !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) +} + +func insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { + // collapse the slice + if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { + if parser.tokens_head != len(parser.tokens) { + // move the tokens down + copy(parser.tokens, parser.tokens[parser.tokens_head:]) + } + // readjust the length + parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] + parser.tokens_head = 0 + } + + parser.tokens = append(parser.tokens, *token) + if pos < 0 { + return + } + copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) + parser.tokens[parser.tokens_head+pos] = *token +} + +// /* +// * Check if the character at the specified position is BOM. +// */ +// +func is_bom_at(b []byte, i int) bool { + return b[i] == 0xEF && b[i+1] == 0xBB && b[i+2] == 0xBF +} + +// +// #ifdef HAVE_CONFIG_H +// #include +// #endif +// +// #include "./yaml.h" +// +// #include +// #include +// +// /* +// * Memory management. +// */ +// +// yaml_DECLARE(void *) +// yaml_malloc(size_t size); +// +// yaml_DECLARE(void *) +// yaml_realloc(void *ptr, size_t size); +// +// yaml_DECLARE(void) +// yaml_free(void *ptr); +// +// yaml_DECLARE(yaml_char_t *) +// yaml_strdup(const yaml_char_t *); +// +// /* +// * Reader: Ensure that the buffer contains at least `length` characters. +// */ +// +// yaml_DECLARE(int) +// yaml_parser_update_buffer(yaml_parser_t *parser, size_t length); +// +// /* +// * Scanner: Ensure that the token stack contains at least one token ready. +// */ +// +// yaml_DECLARE(int) +// yaml_parser_fetch_more_tokens(yaml_parser_t *parser); +// +// /* +// * The size of the input raw buffer. 
+// */ +// +// #define INPUT_RAW_BUFFER_SIZE 16384 +// +// /* +// * The size of the input buffer. +// * +// * It should be possible to decode the whole raw buffer. +// */ +// +// #define INPUT_BUFFER_SIZE (INPUT_RAW_BUFFER_SIZE*3) +// +// /* +// * The size of the output buffer. +// */ +// +// #define OUTPUT_BUFFER_SIZE 16384 +// +// /* +// * The size of the output raw buffer. +// * +// * It should be possible to encode the whole output buffer. +// */ +// +// #define OUTPUT_RAW_BUFFER_SIZE (OUTPUT_BUFFER_SIZE*2+2) +// +// /* +// * The size of other stacks and queues. +// */ +// +// #define INITIAL_STACK_SIZE 16 +// #define INITIAL_QUEUE_SIZE 16 +// #define INITIAL_STRING_SIZE 16 +// +// /* +// * Buffer management. +// */ +// +// #define BUFFER_INIT(context,buffer,size) \ +// (((buffer).start = yaml_malloc(size)) ? \ +// ((buffer).last = (buffer).pointer = (buffer).start, \ +// (buffer).end = (buffer).start+(size), \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define BUFFER_DEL(context,buffer) \ +// (yaml_free((buffer).start), \ +// (buffer).start = (buffer).pointer = (buffer).end = 0) +// +// /* +// * String management. +// */ +// +// typedef struct { +// yaml_char_t *start; +// yaml_char_t *end; +// yaml_char_t *pointer; +// } yaml_string_t; +// +// yaml_DECLARE(int) +// yaml_string_extend(yaml_char_t **start, +// yaml_char_t **pointer, yaml_char_t **end); +// +// yaml_DECLARE(int) +// yaml_string_join( +// yaml_char_t **a_start, yaml_char_t **a_pointer, yaml_char_t **a_end, +// yaml_char_t **b_start, yaml_char_t **b_pointer, yaml_char_t **b_end); +// +// #define NULL_STRING { NULL, NULL, NULL } +// +// #define STRING(string,length) { (string), (string)+(length), (string) } +// +// #define STRING_ASSIGN(value,string,length) \ +// ((value).start = (string), \ +// (value).end = (string)+(length), \ +// (value).pointer = (string)) +// +// #define STRING_INIT(context,string,size) \ +// (((string).start = yaml_malloc(size)) ? 
\ +// ((string).pointer = (string).start, \ +// (string).end = (string).start+(size), \ +// memset((string).start, 0, (size)), \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define STRING_DEL(context,string) \ +// (yaml_free((string).start), \ +// (string).start = (string).pointer = (string).end = 0) +// +// #define STRING_EXTEND(context,string) \ +// (((string).pointer+5 < (string).end) \ +// || yaml_string_extend(&(string).start, \ +// &(string).pointer, &(string).end)) +// +// #define CLEAR(context,string) \ +// ((string).pointer = (string).start, \ +// memset((string).start, 0, (string).end-(string).start)) +// +// #define JOIN(context,string_a,string_b) \ +// ((yaml_string_join(&(string_a).start, &(string_a).pointer, \ +// &(string_a).end, &(string_b).start, \ +// &(string_b).pointer, &(string_b).end)) ? \ +// ((string_b).pointer = (string_b).start, \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// /* +// * String check operations. +// */ +// +// /* +// * Check the octet at the specified position. +// */ +// +// #define CHECK_AT(string,octet,offset) \ +// ((string).pointer[offset] == (yaml_char_t)(octet)) +// +// /* +// * Check the current octet in the buffer. +// */ +// +// #define CHECK(string,octet) CHECK_AT((string),(octet),0) +// +// /* +// * Check if the character at the specified position is an alphabetical +// * character, a digit, '_', or '-'. 
+// */ +// +// #define IS_ALPHA_AT(string,offset) \ +// (((string).pointer[offset] >= (yaml_char_t) '0' && \ +// (string).pointer[offset] <= (yaml_char_t) '9') || \ +// ((string).pointer[offset] >= (yaml_char_t) 'A' && \ +// (string).pointer[offset] <= (yaml_char_t) 'Z') || \ +// ((string).pointer[offset] >= (yaml_char_t) 'a' && \ +// (string).pointer[offset] <= (yaml_char_t) 'z') || \ +// (string).pointer[offset] == '_' || \ +// (string).pointer[offset] == '-') +// +// #define IS_ALPHA(string) IS_ALPHA_AT((string),0) +// +// /* +// * Check if the character at the specified position is a digit. +// */ +// +// #define IS_DIGIT_AT(string,offset) \ +// (((string).pointer[offset] >= (yaml_char_t) '0' && \ +// (string).pointer[offset] <= (yaml_char_t) '9')) +// +// #define IS_DIGIT(string) IS_DIGIT_AT((string),0) +// +// /* +// * Get the value of a digit. +// */ +// +// #define AS_DIGIT_AT(string,offset) \ +// ((string).pointer[offset] - (yaml_char_t) '0') +// +// #define AS_DIGIT(string) AS_DIGIT_AT((string),0) +// +// /* +// * Check if the character at the specified position is a hex-digit. +// */ +// +// #define IS_HEX_AT(string,offset) \ +// (((string).pointer[offset] >= (yaml_char_t) '0' && \ +// (string).pointer[offset] <= (yaml_char_t) '9') || \ +// ((string).pointer[offset] >= (yaml_char_t) 'A' && \ +// (string).pointer[offset] <= (yaml_char_t) 'F') || \ +// ((string).pointer[offset] >= (yaml_char_t) 'a' && \ +// (string).pointer[offset] <= (yaml_char_t) 'f')) +// +// #define IS_HEX(string) IS_HEX_AT((string),0) +// +// /* +// * Get the value of a hex-digit. +// */ +// +// #define AS_HEX_AT(string,offset) \ +// (((string).pointer[offset] >= (yaml_char_t) 'A' && \ +// (string).pointer[offset] <= (yaml_char_t) 'F') ? \ +// ((string).pointer[offset] - (yaml_char_t) 'A' + 10) : \ +// ((string).pointer[offset] >= (yaml_char_t) 'a' && \ +// (string).pointer[offset] <= (yaml_char_t) 'f') ? 
\ +// ((string).pointer[offset] - (yaml_char_t) 'a' + 10) : \ +// ((string).pointer[offset] - (yaml_char_t) '0')) +// +// #define AS_HEX(string) AS_HEX_AT((string),0) +// +// /* +// * Check if the character is ASCII. +// */ +// +// #define IS_ASCII_AT(string,offset) \ +// ((string).pointer[offset] <= (yaml_char_t) '\x7F') +// +// #define IS_ASCII(string) IS_ASCII_AT((string),0) +// +// /* +// * Check if the character can be printed unescaped. +// */ +// +// #define IS_PRINTABLE_AT(string,offset) \ +// (((string).pointer[offset] == 0x0A) /* . == #x0A */ \ +// || ((string).pointer[offset] >= 0x20 /* #x20 <= . <= #x7E */ \ +// && (string).pointer[offset] <= 0x7E) \ +// || ((string).pointer[offset] == 0xC2 /* #0xA0 <= . <= #xD7FF */ \ +// && (string).pointer[offset+1] >= 0xA0) \ +// || ((string).pointer[offset] > 0xC2 \ +// && (string).pointer[offset] < 0xED) \ +// || ((string).pointer[offset] == 0xED \ +// && (string).pointer[offset+1] < 0xA0) \ +// || ((string).pointer[offset] == 0xEE) \ +// || ((string).pointer[offset] == 0xEF /* #xE000 <= . <= #xFFFD */ \ +// && !((string).pointer[offset+1] == 0xBB /* && . != #xFEFF */ \ +// && (string).pointer[offset+2] == 0xBF) \ +// && !((string).pointer[offset+1] == 0xBF \ +// && ((string).pointer[offset+2] == 0xBE \ +// || (string).pointer[offset+2] == 0xBF)))) +// +// #define IS_PRINTABLE(string) IS_PRINTABLE_AT((string),0) +// +// /* +// * Check if the character at the specified position is NUL. +// */ +// +// #define IS_Z_AT(string,offset) CHECK_AT((string),'\0',(offset)) +// +// #define IS_Z(string) IS_Z_AT((string),0) +// +// /* +// * Check if the character at the specified position is BOM. +// */ +// +// #define IS_BOM_AT(string,offset) \ +// (CHECK_AT((string),'\xEF',(offset)) \ +// && CHECK_AT((string),'\xBB',(offset)+1) \ +// && CHECK_AT((string),'\xBF',(offset)+2)) /* BOM (#xFEFF) */ +// +// #define IS_BOM(string) IS_BOM_AT(string,0) +// +// /* +// * Check if the character at the specified position is space. 
+// */ +// +// #define IS_SPACE_AT(string,offset) CHECK_AT((string),' ',(offset)) +// +// #define IS_SPACE(string) IS_SPACE_AT((string),0) +// +// /* +// * Check if the character at the specified position is tab. +// */ +// +// #define IS_TAB_AT(string,offset) CHECK_AT((string),'\t',(offset)) +// +// #define IS_TAB(string) IS_TAB_AT((string),0) +// +// /* +// * Check if the character at the specified position is blank (space or tab). +// */ +// +// #define IS_BLANK_AT(string,offset) \ +// (IS_SPACE_AT((string),(offset)) || IS_TAB_AT((string),(offset))) +// +// #define IS_BLANK(string) IS_BLANK_AT((string),0) +// +// /* +// * Check if the character at the specified position is a line break. +// */ +// +// #define IS_BREAK_AT(string,offset) \ +// (CHECK_AT((string),'\r',(offset)) /* CR (#xD)*/ \ +// || CHECK_AT((string),'\n',(offset)) /* LF (#xA) */ \ +// || (CHECK_AT((string),'\xC2',(offset)) \ +// && CHECK_AT((string),'\x85',(offset)+1)) /* NEL (#x85) */ \ +// || (CHECK_AT((string),'\xE2',(offset)) \ +// && CHECK_AT((string),'\x80',(offset)+1) \ +// && CHECK_AT((string),'\xA8',(offset)+2)) /* LS (#x2028) */ \ +// || (CHECK_AT((string),'\xE2',(offset)) \ +// && CHECK_AT((string),'\x80',(offset)+1) \ +// && CHECK_AT((string),'\xA9',(offset)+2))) /* PS (#x2029) */ +// +// #define IS_BREAK(string) IS_BREAK_AT((string),0) +// +// #define IS_CRLF_AT(string,offset) \ +// (CHECK_AT((string),'\r',(offset)) && CHECK_AT((string),'\n',(offset)+1)) +// +// #define IS_CRLF(string) IS_CRLF_AT((string),0) +// +// /* +// * Check if the character is a line break or NUL. +// */ +// +// #define IS_BREAKZ_AT(string,offset) \ +// (IS_BREAK_AT((string),(offset)) || IS_Z_AT((string),(offset))) +// +// #define IS_BREAKZ(string) IS_BREAKZ_AT((string),0) +// +// /* +// * Check if the character is a line break, space, or NUL. 
+// */ +// +// #define IS_SPACEZ_AT(string,offset) \ +// (IS_SPACE_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset))) +// +// #define IS_SPACEZ(string) IS_SPACEZ_AT((string),0) +// +// /* +// * Check if the character is a line break, space, tab, or NUL. +// */ +// +// #define IS_BLANKZ_AT(string,offset) \ +// (IS_BLANK_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset))) +// +// #define IS_BLANKZ(string) IS_BLANKZ_AT((string),0) +// +// /* +// * Determine the width of the character. +// */ +// +// #define WIDTH_AT(string,offset) \ +// (((string).pointer[offset] & 0x80) == 0x00 ? 1 : \ +// ((string).pointer[offset] & 0xE0) == 0xC0 ? 2 : \ +// ((string).pointer[offset] & 0xF0) == 0xE0 ? 3 : \ +// ((string).pointer[offset] & 0xF8) == 0xF0 ? 4 : 0) +// +// #define WIDTH(string) WIDTH_AT((string),0) +// +// /* +// * Move the string pointer to the next character. +// */ +// +// #define MOVE(string) ((string).pointer += WIDTH((string))) +// +// /* +// * Copy a character and move the pointers of both strings. +// */ +// +// #define COPY(string_a,string_b) \ +// ((*(string_b).pointer & 0x80) == 0x00 ? \ +// (*((string_a).pointer++) = *((string_b).pointer++)) : \ +// (*(string_b).pointer & 0xE0) == 0xC0 ? \ +// (*((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++)) : \ +// (*(string_b).pointer & 0xF0) == 0xE0 ? \ +// (*((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++)) : \ +// (*(string_b).pointer & 0xF8) == 0xF0 ? \ +// (*((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++), \ +// *((string_a).pointer++) = *((string_b).pointer++)) : 0) +// +// /* +// * Stack and queue management. 
+// */ +// +// yaml_DECLARE(int) +// yaml_stack_extend(void **start, void **top, void **end); +// +// yaml_DECLARE(int) +// yaml_queue_extend(void **start, void **head, void **tail, void **end); +// +// #define STACK_INIT(context,stack,size) \ +// (((stack).start = yaml_malloc((size)*sizeof(*(stack).start))) ? \ +// ((stack).top = (stack).start, \ +// (stack).end = (stack).start+(size), \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define STACK_DEL(context,stack) \ +// (yaml_free((stack).start), \ +// (stack).start = (stack).top = (stack).end = 0) +// +// #define STACK_EMPTY(context,stack) \ +// ((stack).start == (stack).top) +// +// #define PUSH(context,stack,value) \ +// (((stack).top != (stack).end \ +// || yaml_stack_extend((void **)&(stack).start, \ +// (void **)&(stack).top, (void **)&(stack).end)) ? \ +// (*((stack).top++) = value, \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define POP(context,stack) \ +// (*(--(stack).top)) +// +// #define QUEUE_INIT(context,queue,size) \ +// (((queue).start = yaml_malloc((size)*sizeof(*(queue).start))) ? \ +// ((queue).head = (queue).tail = (queue).start, \ +// (queue).end = (queue).start+(size), \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define QUEUE_DEL(context,queue) \ +// (yaml_free((queue).start), \ +// (queue).start = (queue).head = (queue).tail = (queue).end = 0) +// +// #define QUEUE_EMPTY(context,queue) \ +// ((queue).head == (queue).tail) +// +// #define ENQUEUE(context,queue,value) \ +// (((queue).tail != (queue).end \ +// || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \ +// (void **)&(queue).tail, (void **)&(queue).end)) ? 
\ +// (*((queue).tail++) = value, \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// #define DEQUEUE(context,queue) \ +// (*((queue).head++)) +// +// #define QUEUE_INSERT(context,queue,index,value) \ +// (((queue).tail != (queue).end \ +// || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \ +// (void **)&(queue).tail, (void **)&(queue).end)) ? \ +// (memmove((queue).head+(index)+1,(queue).head+(index), \ +// ((queue).tail-(queue).head-(index))*sizeof(*(queue).start)), \ +// *((queue).head+(index)) = value, \ +// (queue).tail++, \ +// 1) : \ +// ((context)->error = yaml_MEMORY_ERROR, \ +// 0)) +// +// /* +// * Token initializers. +// */ +// +// #define TOKEN_INIT(token,token_type,token_start_mark,token_end_mark) \ +// (memset(&(token), 0, sizeof(yaml_token_t)), \ +// (token).type = (token_type), \ +// (token).start_mark = (token_start_mark), \ +// (token).end_mark = (token_end_mark)) +// +// #define STREAM_START_TOKEN_INIT(token,token_encoding,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_STREAM_START_TOKEN,(start_mark),(end_mark)), \ +// (token).data.stream_start.encoding = (token_encoding)) +// +// #define STREAM_END_TOKEN_INIT(token,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_STREAM_END_TOKEN,(start_mark),(end_mark))) +// +// #define ALIAS_TOKEN_INIT(token,token_value,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_ALIAS_TOKEN,(start_mark),(end_mark)), \ +// (token).data.alias.value = (token_value)) +// +// #define ANCHOR_TOKEN_INIT(token,token_value,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_ANCHOR_TOKEN,(start_mark),(end_mark)), \ +// (token).data.anchor.value = (token_value)) +// +// #define TAG_TOKEN_INIT(token,token_handle,token_suffix,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_TAG_TOKEN,(start_mark),(end_mark)), \ +// (token).data.tag.handle = (token_handle), \ +// (token).data.tag.suffix = (token_suffix)) +// +// #define 
SCALAR_TOKEN_INIT(token,token_value,token_length,token_style,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_SCALAR_TOKEN,(start_mark),(end_mark)), \ +// (token).data.scalar.value = (token_value), \ +// (token).data.scalar.length = (token_length), \ +// (token).data.scalar.style = (token_style)) +// +// #define VERSION_DIRECTIVE_TOKEN_INIT(token,token_major,token_minor,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_VERSION_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \ +// (token).data.version_directive.major = (token_major), \ +// (token).data.version_directive.minor = (token_minor)) +// +// #define TAG_DIRECTIVE_TOKEN_INIT(token,token_handle,token_prefix,start_mark,end_mark) \ +// (TOKEN_INIT((token),yaml_TAG_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \ +// (token).data.tag_directive.handle = (token_handle), \ +// (token).data.tag_directive.prefix = (token_prefix)) +// +// /* +// * Event initializers. +// */ +// +// #define EVENT_INIT(event,event_type,event_start_mark,event_end_mark) \ +// (memset(&(event), 0, sizeof(yaml_event_t)), \ +// (event).type = (event_type), \ +// (event).start_mark = (event_start_mark), \ +// (event).end_mark = (event_end_mark)) +// +// #define STREAM_START_EVENT_INIT(event,event_encoding,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_STREAM_START_EVENT,(start_mark),(end_mark)), \ +// (event).data.stream_start.encoding = (event_encoding)) +// +// #define STREAM_END_EVENT_INIT(event,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_STREAM_END_EVENT,(start_mark),(end_mark))) +// +// #define DOCUMENT_START_EVENT_INIT(event,event_version_directive, \ +// event_tag_directives_start,event_tag_directives_end,event_implicit,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_DOCUMENT_START_EVENT,(start_mark),(end_mark)), \ +// (event).data.document_start.version_directive = (event_version_directive), \ +// (event).data.document_start.tag_directives.start = (event_tag_directives_start), \ +// 
(event).data.document_start.tag_directives.end = (event_tag_directives_end), \ +// (event).data.document_start.implicit = (event_implicit)) +// +// #define DOCUMENT_END_EVENT_INIT(event,event_implicit,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_DOCUMENT_END_EVENT,(start_mark),(end_mark)), \ +// (event).data.document_end.implicit = (event_implicit)) +// +// #define ALIAS_EVENT_INIT(event,event_anchor,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_ALIAS_EVENT,(start_mark),(end_mark)), \ +// (event).data.alias.anchor = (event_anchor)) +// +// #define SCALAR_EVENT_INIT(event,event_anchor,event_tag,event_value,event_length, \ +// event_plain_implicit, event_quoted_implicit,event_style,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_SCALAR_EVENT,(start_mark),(end_mark)), \ +// (event).data.scalar.anchor = (event_anchor), \ +// (event).data.scalar.tag = (event_tag), \ +// (event).data.scalar.value = (event_value), \ +// (event).data.scalar.length = (event_length), \ +// (event).data.scalar.plain_implicit = (event_plain_implicit), \ +// (event).data.scalar.quoted_implicit = (event_quoted_implicit), \ +// (event).data.scalar.style = (event_style)) +// +// #define SEQUENCE_START_EVENT_INIT(event,event_anchor,event_tag, \ +// event_implicit,event_style,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_SEQUENCE_START_EVENT,(start_mark),(end_mark)), \ +// (event).data.sequence_start.anchor = (event_anchor), \ +// (event).data.sequence_start.tag = (event_tag), \ +// (event).data.sequence_start.implicit = (event_implicit), \ +// (event).data.sequence_start.style = (event_style)) +// +// #define SEQUENCE_END_EVENT_INIT(event,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_SEQUENCE_END_EVENT,(start_mark),(end_mark))) +// +// #define MAPPING_START_EVENT_INIT(event,event_anchor,event_tag, \ +// event_implicit,event_style,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_MAPPING_START_EVENT,(start_mark),(end_mark)), \ +// (event).data.mapping_start.anchor = 
(event_anchor), \ +// (event).data.mapping_start.tag = (event_tag), \ +// (event).data.mapping_start.implicit = (event_implicit), \ +// (event).data.mapping_start.style = (event_style)) +// +// #define MAPPING_END_EVENT_INIT(event,start_mark,end_mark) \ +// (EVENT_INIT((event),yaml_MAPPING_END_EVENT,(start_mark),(end_mark))) +// +// /* +// * Document initializer. +// */ +// +// #define DOCUMENT_INIT(document,document_nodes_start,document_nodes_end, \ +// document_version_directive,document_tag_directives_start, \ +// document_tag_directives_end,document_start_implicit, \ +// document_end_implicit,document_start_mark,document_end_mark) \ +// (memset(&(document), 0, sizeof(yaml_document_t)), \ +// (document).nodes.start = (document_nodes_start), \ +// (document).nodes.end = (document_nodes_end), \ +// (document).nodes.top = (document_nodes_start), \ +// (document).version_directive = (document_version_directive), \ +// (document).tag_directives.start = (document_tag_directives_start), \ +// (document).tag_directives.end = (document_tag_directives_end), \ +// (document).start_implicit = (document_start_implicit), \ +// (document).end_implicit = (document_end_implicit), \ +// (document).start_mark = (document_start_mark), \ +// (document).end_mark = (document_end_mark)) +// +// /* +// * Node initializers. 
+// */ +// +// #define NODE_INIT(node,node_type,node_tag,node_start_mark,node_end_mark) \ +// (memset(&(node), 0, sizeof(yaml_node_t)), \ +// (node).type = (node_type), \ +// (node).tag = (node_tag), \ +// (node).start_mark = (node_start_mark), \ +// (node).end_mark = (node_end_mark)) +// +// #define SCALAR_NODE_INIT(node,node_tag,node_value,node_length, \ +// node_style,start_mark,end_mark) \ +// (NODE_INIT((node),yaml_SCALAR_NODE,(node_tag),(start_mark),(end_mark)), \ +// (node).data.scalar.value = (node_value), \ +// (node).data.scalar.length = (node_length), \ +// (node).data.scalar.style = (node_style)) +// +// #define SEQUENCE_NODE_INIT(node,node_tag,node_items_start,node_items_end, \ +// node_style,start_mark,end_mark) \ +// (NODE_INIT((node),yaml_SEQUENCE_NODE,(node_tag),(start_mark),(end_mark)), \ +// (node).data.sequence.items.start = (node_items_start), \ +// (node).data.sequence.items.end = (node_items_end), \ +// (node).data.sequence.items.top = (node_items_start), \ +// (node).data.sequence.style = (node_style)) +// +// #define MAPPING_NODE_INIT(node,node_tag,node_pairs_start,node_pairs_end, \ +// node_style,start_mark,end_mark) \ +// (NODE_INIT((node),yaml_MAPPING_NODE,(node_tag),(start_mark),(end_mark)), \ +// (node).data.mapping.pairs.start = (node_pairs_start), \ +// (node).data.mapping.pairs.end = (node_pairs_end), \ +// (node).data.mapping.pairs.top = (node_pairs_start), \ +// (node).data.mapping.style = (node_style)) +// diff --git a/vendor/github.com/cloudfoundry-incubator/candiedyaml/yamlh.go b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yamlh.go new file mode 100644 index 0000000..d608dbb --- /dev/null +++ b/vendor/github.com/cloudfoundry-incubator/candiedyaml/yamlh.go @@ -0,0 +1,953 @@ +/* +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package candiedyaml + +import ( + "fmt" + "io" +) + +/** The version directive data. */ +type yaml_version_directive_t struct { + major int // The major version number + minor int // The minor version number +} + +/** The tag directive data. */ +type yaml_tag_directive_t struct { + handle []byte // The tag handle + prefix []byte // The tag prefix +} + +/** The stream encoding. */ +type yaml_encoding_t int + +const ( + /** Let the parser choose the encoding. */ + yaml_ANY_ENCODING yaml_encoding_t = iota + /** The defau lt UTF-8 encoding. */ + yaml_UTF8_ENCODING + /** The UTF-16-LE encoding with BOM. */ + yaml_UTF16LE_ENCODING + /** The UTF-16-BE encoding with BOM. */ + yaml_UTF16BE_ENCODING +) + +/** Line break types. */ +type yaml_break_t int + +const ( + yaml_ANY_BREAK yaml_break_t = iota /** Let the parser choose the break type. */ + yaml_CR_BREAK /** Use CR for line breaks (Mac style). */ + yaml_LN_BREAK /** Use LN for line breaks (Unix style). */ + yaml_CRLN_BREAK /** Use CR LN for line breaks (DOS style). */ +) + +/** Many bad things could happen with the parser and emitter. */ +type YAML_error_type_t int + +const ( + /** No error is produced. */ + yaml_NO_ERROR YAML_error_type_t = iota + + /** Cannot allocate or reallocate a block of memory. */ + yaml_MEMORY_ERROR + + /** Cannot read or decode the input stream. */ + yaml_READER_ERROR + /** Cannot scan the input stream. */ + yaml_SCANNER_ERROR + /** Cannot parse the input stream. */ + yaml_PARSER_ERROR + /** Cannot compose a YAML document. */ + yaml_COMPOSER_ERROR + + /** Cannot write to the output stream. 
*/ + yaml_WRITER_ERROR + /** Cannot emit a YAML stream. */ + yaml_EMITTER_ERROR +) + +/** The pointer position. */ +type YAML_mark_t struct { + /** The position index. */ + index int + + /** The position line. */ + line int + + /** The position column. */ + column int +} + +func (m YAML_mark_t) String() string { + return fmt.Sprintf("line %d, column %d", m.line, m.column) +} + +/** @} */ + +/** + * @defgroup styles Node Styles + * @{ + */ + +type yaml_style_t int + +/** Scalar styles. */ +type yaml_scalar_style_t yaml_style_t + +const ( + /** Let the emitter choose the style. */ + yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota + + /** The plain scalar style. */ + yaml_PLAIN_SCALAR_STYLE + + /** The single-quoted scalar style. */ + yaml_SINGLE_QUOTED_SCALAR_STYLE + /** The double-quoted scalar style. */ + yaml_DOUBLE_QUOTED_SCALAR_STYLE + + /** The literal scalar style. */ + yaml_LITERAL_SCALAR_STYLE + /** The folded scalar style. */ + yaml_FOLDED_SCALAR_STYLE +) + +/** Sequence styles. */ +type yaml_sequence_style_t yaml_style_t + +const ( + /** Let the emitter choose the style. */ + yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota + + /** The block sequence style. */ + yaml_BLOCK_SEQUENCE_STYLE + /** The flow sequence style. */ + yaml_FLOW_SEQUENCE_STYLE +) + +/** Mapping styles. */ +type yaml_mapping_style_t yaml_style_t + +const ( + /** Let the emitter choose the style. */ + yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota + + /** The block mapping style. */ + yaml_BLOCK_MAPPING_STYLE + /** The flow mapping style. */ + yaml_FLOW_MAPPING_STYLE + +/* yaml_FLOW_SET_MAPPING_STYLE */ +) + +/** @} */ + +/** + * @defgroup tokens Tokens + * @{ + */ + +/** Token types. */ +type yaml_token_type_t int + +const ( + /** An empty token. */ + yaml_NO_TOKEN yaml_token_type_t = iota + + /** A STREAM-START token. */ + yaml_STREAM_START_TOKEN + /** A STREAM-END token. */ + yaml_STREAM_END_TOKEN + + /** A VERSION-DIRECTIVE token. 
*/ + yaml_VERSION_DIRECTIVE_TOKEN + /** A TAG-DIRECTIVE token. */ + yaml_TAG_DIRECTIVE_TOKEN + /** A DOCUMENT-START token. */ + yaml_DOCUMENT_START_TOKEN + /** A DOCUMENT-END token. */ + yaml_DOCUMENT_END_TOKEN + + /** A BLOCK-SEQUENCE-START token. */ + yaml_BLOCK_SEQUENCE_START_TOKEN + /** A BLOCK-SEQUENCE-END token. */ + yaml_BLOCK_MAPPING_START_TOKEN + /** A BLOCK-END token. */ + yaml_BLOCK_END_TOKEN + + /** A FLOW-SEQUENCE-START token. */ + yaml_FLOW_SEQUENCE_START_TOKEN + /** A FLOW-SEQUENCE-END token. */ + yaml_FLOW_SEQUENCE_END_TOKEN + /** A FLOW-MAPPING-START token. */ + yaml_FLOW_MAPPING_START_TOKEN + /** A FLOW-MAPPING-END token. */ + yaml_FLOW_MAPPING_END_TOKEN + + /** A BLOCK-ENTRY token. */ + yaml_BLOCK_ENTRY_TOKEN + /** A FLOW-ENTRY token. */ + yaml_FLOW_ENTRY_TOKEN + /** A KEY token. */ + yaml_KEY_TOKEN + /** A VALUE token. */ + yaml_VALUE_TOKEN + + /** An ALIAS token. */ + yaml_ALIAS_TOKEN + /** An ANCHOR token. */ + yaml_ANCHOR_TOKEN + /** A TAG token. */ + yaml_TAG_TOKEN + /** A SCALAR token. */ + yaml_SCALAR_TOKEN +) + +/** The token structure. */ +type yaml_token_t struct { + + /** The token type. */ + token_type yaml_token_type_t + + /** The token data. */ + /** The stream start (for @c yaml_STREAM_START_TOKEN). */ + encoding yaml_encoding_t + + /** The alias (for @c yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN,yaml_TAG_TOKEN ). */ + /** The anchor (for @c ). */ + /** The scalar value (for @c ). */ + value []byte + + /** The tag suffix. */ + suffix []byte + + /** The scalar value (for @c yaml_SCALAR_TOKEN). */ + /** The scalar style. */ + style yaml_scalar_style_t + + /** The version directive (for @c yaml_VERSION_DIRECTIVE_TOKEN). */ + version_directive yaml_version_directive_t + + /** The tag directive (for @c yaml_TAG_DIRECTIVE_TOKEN). */ + prefix []byte + + /** The beginning of the token. */ + start_mark YAML_mark_t + /** The end of the token. 
*/ + end_mark YAML_mark_t + + major, minor int +} + +/** + * @defgroup events Events + * @{ + */ + +/** Event types. */ +type yaml_event_type_t int + +const ( + /** An empty event. */ + yaml_NO_EVENT yaml_event_type_t = iota + + /** A STREAM-START event. */ + yaml_STREAM_START_EVENT + /** A STREAM-END event. */ + yaml_STREAM_END_EVENT + + /** A DOCUMENT-START event. */ + yaml_DOCUMENT_START_EVENT + /** A DOCUMENT-END event. */ + yaml_DOCUMENT_END_EVENT + + /** An ALIAS event. */ + yaml_ALIAS_EVENT + /** A SCALAR event. */ + yaml_SCALAR_EVENT + + /** A SEQUENCE-START event. */ + yaml_SEQUENCE_START_EVENT + /** A SEQUENCE-END event. */ + yaml_SEQUENCE_END_EVENT + + /** A MAPPING-START event. */ + yaml_MAPPING_START_EVENT + /** A MAPPING-END event. */ + yaml_MAPPING_END_EVENT +) + +/** The event structure. */ +type yaml_event_t struct { + + /** The event type. */ + event_type yaml_event_type_t + + /** The stream parameters (for @c yaml_STREAM_START_EVENT). */ + encoding yaml_encoding_t + + /** The document parameters (for @c yaml_DOCUMENT_START_EVENT). */ + version_directive *yaml_version_directive_t + + /** The beginning and end of the tag directives list. */ + tag_directives []yaml_tag_directive_t + + /** The document parameters (for @c yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT,yaml_MAPPING_START_EVENT). */ + /** Is the document indicator implicit? */ + implicit bool + + /** The alias parameters (for @c yaml_ALIAS_EVENT,yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). */ + /** The anchor. */ + anchor []byte + + /** The scalar parameters (for @c yaml_SCALAR_EVENT,yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). */ + /** The tag. */ + tag []byte + /** The scalar value. */ + value []byte + + /** Is the tag optional for the plain style? */ + plain_implicit bool + /** Is the tag optional for any non-plain style? 
*/ + quoted_implicit bool + + /** The sequence parameters (for @c yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). */ + /** The sequence style. */ + /** The scalar style. */ + style yaml_style_t + + /** The beginning of the event. */ + start_mark, end_mark YAML_mark_t +} + +/** + * @defgroup nodes Nodes + * @{ + */ + +const ( + /** The tag @c !!null with the only possible value: @c null. */ + yaml_NULL_TAG = "tag:yaml.org,2002:null" + /** The tag @c !!bool with the values: @c true and @c falce. */ + yaml_BOOL_TAG = "tag:yaml.org,2002:bool" + /** The tag @c !!str for string values. */ + yaml_STR_TAG = "tag:yaml.org,2002:str" + /** The tag @c !!int for integer values. */ + yaml_INT_TAG = "tag:yaml.org,2002:int" + /** The tag @c !!float for float values. */ + yaml_FLOAT_TAG = "tag:yaml.org,2002:float" + /** The tag @c !!timestamp for date and time values. */ + yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" + + /** The tag @c !!seq is used to denote sequences. */ + yaml_SEQ_TAG = "tag:yaml.org,2002:seq" + /** The tag @c !!map is used to denote mapping. */ + yaml_MAP_TAG = "tag:yaml.org,2002:map" + + /** The default scalar tag is @c !!str. */ + yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG + /** The default sequence tag is @c !!seq. */ + yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG + /** The default mapping tag is @c !!map. */ + yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG + + yaml_BINARY_TAG = "tag:yaml.org,2002:binary" +) + +/** Node types. */ +type yaml_node_type_t int + +const ( + /** An empty node. */ + yaml_NO_NODE yaml_node_type_t = iota + + /** A scalar node. */ + yaml_SCALAR_NODE + /** A sequence node. */ + yaml_SEQUENCE_NODE + /** A mapping node. */ + yaml_MAPPING_NODE +) + +/** An element of a sequence node. */ +type yaml_node_item_t int + +/** An element of a mapping node. */ +type yaml_node_pair_t struct { + /** The key of the element. */ + key int + /** The value of the element. */ + value int +} + +/** The node structure. 
*/ +type yaml_node_t struct { + + /** The node type. */ + node_type yaml_node_type_t + + /** The node tag. */ + tag []byte + + /** The scalar parameters (for @c yaml_SCALAR_NODE). */ + scalar struct { + /** The scalar value. */ + value []byte + /** The scalar style. */ + style yaml_scalar_style_t + } + + /** The sequence parameters (for @c yaml_SEQUENCE_NODE). */ + sequence struct { + /** The stack of sequence items. */ + items []yaml_node_item_t + /** The sequence style. */ + style yaml_sequence_style_t + } + + /** The mapping parameters (for @c yaml_MAPPING_NODE). */ + mapping struct { + /** The stack of mapping pairs (key, value). */ + pairs []yaml_node_pair_t + /** The mapping style. */ + style yaml_mapping_style_t + } + + /** The beginning of the node. */ + start_mark YAML_mark_t + /** The end of the node. */ + end_mark YAML_mark_t +} + +/** The document structure. */ +type yaml_document_t struct { + + /** The document nodes. */ + nodes []yaml_node_t + + /** The version directive. */ + version_directive *yaml_version_directive_t + + /** The list of tag directives. */ + tags []yaml_tag_directive_t + + /** Is the document start indicator implicit? */ + start_implicit bool + /** Is the document end indicator implicit? */ + end_implicit bool + + /** The beginning of the document. */ + start_mark YAML_mark_t + /** The end of the document. */ + end_mark YAML_mark_t +} + +/** + * The prototype of a read handler. + * + * The read handler is called when the parser needs to read more bytes from the + * source. The handler should write not more than @a size bytes to the @a + * buffer. The number of written bytes should be set to the @a length variable. + * + * @param[in,out] data A pointer to an application data specified by + * yaml_parser_set_input(). + * @param[out] buffer The buffer to write the data from the source. + * @param[in] size The size of the buffer. + * @param[out] size_read The actual number of bytes read from the source. 
+ * + * @returns On success, the handler should return @c 1. If the handler failed, + * the returned value should be @c 0. On EOF, the handler should set the + * @a size_read to @c 0 and return @c 1. + */ + +type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) + +/** + * This structure holds information about a potential simple key. + */ + +type yaml_simple_key_t struct { + /** Is a simple key possible? */ + possible bool + + /** Is a simple key required? */ + required bool + + /** The number of the token. */ + token_number int + + /** The position mark. */ + mark YAML_mark_t +} + +/** + * The states of the parser. + */ +type yaml_parser_state_t int + +const ( + /** Expect STREAM-START. */ + yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota + /** Expect the beginning of an implicit document. */ + yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE + /** Expect DOCUMENT-START. */ + yaml_PARSE_DOCUMENT_START_STATE + /** Expect the content of a document. */ + yaml_PARSE_DOCUMENT_CONTENT_STATE + /** Expect DOCUMENT-END. */ + yaml_PARSE_DOCUMENT_END_STATE + /** Expect a block node. */ + yaml_PARSE_BLOCK_NODE_STATE + /** Expect a block node or indentless sequence. */ + yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE + /** Expect a flow node. */ + yaml_PARSE_FLOW_NODE_STATE + /** Expect the first entry of a block sequence. */ + yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE + /** Expect an entry of a block sequence. */ + yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE + /** Expect an entry of an indentless sequence. */ + yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + /** Expect the first key of a block mapping. */ + yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE + /** Expect a block mapping key. */ + yaml_PARSE_BLOCK_MAPPING_KEY_STATE + /** Expect a block mapping value. */ + yaml_PARSE_BLOCK_MAPPING_VALUE_STATE + /** Expect the first entry of a flow sequence. */ + yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE + /** Expect an entry of a flow sequence. 
*/ + yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE + /** Expect a key of an ordered mapping. */ + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE + /** Expect a value of an ordered mapping. */ + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE + /** Expect the and of an ordered mapping entry. */ + yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE + /** Expect the first key of a flow mapping. */ + yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE + /** Expect a key of a flow mapping. */ + yaml_PARSE_FLOW_MAPPING_KEY_STATE + /** Expect a value of a flow mapping. */ + yaml_PARSE_FLOW_MAPPING_VALUE_STATE + /** Expect an empty value of a flow mapping. */ + yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE + /** Expect nothing. */ + yaml_PARSE_END_STATE +) + +/** + * This structure holds aliases data. + */ + +type yaml_alias_data_t struct { + /** The anchor. */ + anchor []byte + /** The node id. */ + index int + /** The anchor mark. */ + mark YAML_mark_t +} + +/** + * The parser structure. + * + * All members are internal. Manage the structure using the @c yaml_parser_ + * family of functions. + */ + +type yaml_parser_t struct { + + /** + * @name Error handling + * @{ + */ + + /** Error type. */ + error YAML_error_type_t + /** Error description. */ + problem string + /** The byte about which the problem occured. */ + problem_offset int + /** The problematic value (@c -1 is none). */ + problem_value int + /** The problem position. */ + problem_mark YAML_mark_t + /** The error context. */ + context string + /** The context position. */ + context_mark YAML_mark_t + + /** + * @} + */ + + /** + * @name Reader stuff + * @{ + */ + + /** Read handler. */ + read_handler yaml_read_handler_t + + /** Reader input data. */ + input_reader io.Reader + input []byte + input_pos int + + /** EOF flag */ + eof bool + + /** The working buffer. */ + buffer []byte + buffer_pos int + + /* The number of unread characters in the buffer. */ + unread int + + /** The raw buffer. 
*/ + raw_buffer []byte + raw_buffer_pos int + + /** The input encoding. */ + encoding yaml_encoding_t + + /** The offset of the current position (in bytes). */ + offset int + + /** The mark of the current position. */ + mark YAML_mark_t + + /** + * @} + */ + + /** + * @name Scanner stuff + * @{ + */ + + /** Have we started to scan the input stream? */ + stream_start_produced bool + + /** Have we reached the end of the input stream? */ + stream_end_produced bool + + /** The number of unclosed '[' and '{' indicators. */ + flow_level int + + /** The tokens queue. */ + tokens []yaml_token_t + tokens_head int + + /** The number of tokens fetched from the queue. */ + tokens_parsed int + + /* Does the tokens queue contain a token ready for dequeueing. */ + token_available bool + + /** The indentation levels stack. */ + indents []int + + /** The current indentation level. */ + indent int + + /** May a simple key occur at the current position? */ + simple_key_allowed bool + + /** The stack of simple keys. */ + simple_keys []yaml_simple_key_t + + /** + * @} + */ + + /** + * @name Parser stuff + * @{ + */ + + /** The parser states stack. */ + states []yaml_parser_state_t + + /** The current parser state. */ + state yaml_parser_state_t + + /** The stack of marks. */ + marks []YAML_mark_t + + /** The list of TAG directives. */ + tag_directives []yaml_tag_directive_t + + /** + * @} + */ + + /** + * @name Dumper stuff + * @{ + */ + + /** The alias data. */ + aliases []yaml_alias_data_t + + /** The currently parsed document. */ + document *yaml_document_t + + /** + * @} + */ + +} + +/** + * The prototype of a write handler. + * + * The write handler is called when the emitter needs to flush the accumulated + * characters to the output. The handler should write @a size bytes of the + * @a buffer to the output. + * + * @param[in,out] data A pointer to an application data specified by + * yaml_emitter_set_output(). + * @param[in] buffer The buffer with bytes to be written. 
+ * @param[in] size The size of the buffer. + * + * @returns On success, the handler should return @c 1. If the handler failed, + * the returned value should be @c 0. + */ + +type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error + +/** The emitter states. */ +type yaml_emitter_state_t int + +const ( + /** Expect STREAM-START. */ + yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota + /** Expect the first DOCUMENT-START or STREAM-END. */ + yaml_EMIT_FIRST_DOCUMENT_START_STATE + /** Expect DOCUMENT-START or STREAM-END. */ + yaml_EMIT_DOCUMENT_START_STATE + /** Expect the content of a document. */ + yaml_EMIT_DOCUMENT_CONTENT_STATE + /** Expect DOCUMENT-END. */ + yaml_EMIT_DOCUMENT_END_STATE + /** Expect the first item of a flow sequence. */ + yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE + /** Expect an item of a flow sequence. */ + yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE + /** Expect the first key of a flow mapping. */ + yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE + /** Expect a key of a flow mapping. */ + yaml_EMIT_FLOW_MAPPING_KEY_STATE + /** Expect a value for a simple key of a flow mapping. */ + yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE + /** Expect a value of a flow mapping. */ + yaml_EMIT_FLOW_MAPPING_VALUE_STATE + /** Expect the first item of a block sequence. */ + yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE + /** Expect an item of a block sequence. */ + yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE + /** Expect the first key of a block mapping. */ + yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE + /** Expect the key of a block mapping. */ + yaml_EMIT_BLOCK_MAPPING_KEY_STATE + /** Expect a value for a simple key of a block mapping. */ + yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE + /** Expect a value of a block mapping. */ + yaml_EMIT_BLOCK_MAPPING_VALUE_STATE + /** Expect nothing. */ + yaml_EMIT_END_STATE +) + +/** + * The emitter structure. + * + * All members are internal. Manage the structure using the @c yaml_emitter_ + * family of functions. 
+ */ + +type yaml_emitter_t struct { + + /** + * @name Error handling + * @{ + */ + + /** Error type. */ + error YAML_error_type_t + /** Error description. */ + problem string + + /** + * @} + */ + + /** + * @name Writer stuff + * @{ + */ + + /** Write handler. */ + write_handler yaml_write_handler_t + + /** Standard (string or file) output data. */ + output_buffer *[]byte + output_writer io.Writer + + /** The working buffer. */ + buffer []byte + buffer_pos int + + /** The raw buffer. */ + raw_buffer []byte + raw_buffer_pos int + + /** The stream encoding. */ + encoding yaml_encoding_t + + /** + * @} + */ + + /** + * @name Emitter stuff + * @{ + */ + + /** If the output is in the canonical style? */ + canonical bool + /** The number of indentation spaces. */ + best_indent int + /** The preferred width of the output lines. */ + best_width int + /** Allow unescaped non-ASCII characters? */ + unicode bool + /** The preferred line break. */ + line_break yaml_break_t + + /** The stack of states. */ + states []yaml_emitter_state_t + + /** The current emitter state. */ + state yaml_emitter_state_t + + /** The event queue. */ + events []yaml_event_t + events_head int + + /** The stack of indentation levels. */ + indents []int + + /** The list of tag directives. */ + tag_directives []yaml_tag_directive_t + + /** The current indentation level. */ + indent int + + /** The current flow level. */ + flow_level int + + /** Is it the document root context? */ + root_context bool + /** Is it a sequence context? */ + sequence_context bool + /** Is it a mapping context? */ + mapping_context bool + /** Is it a simple mapping key context? */ + simple_key_context bool + + /** The current line. */ + line int + /** The current column. */ + column int + /** If the last character was a whitespace? */ + whitespace bool + /** If the last character was an indentation character (' ', '-', '?', ':')? */ + indention bool + /** If an explicit document end is required? 
*/ + open_ended bool + + /** Anchor analysis. */ + anchor_data struct { + /** The anchor value. */ + anchor []byte + /** Is it an alias? */ + alias bool + } + + /** Tag analysis. */ + tag_data struct { + /** The tag handle. */ + handle []byte + /** The tag suffix. */ + suffix []byte + } + + /** Scalar analysis. */ + scalar_data struct { + /** The scalar value. */ + value []byte + /** Does the scalar contain line breaks? */ + multiline bool + /** Can the scalar be expessed in the flow plain style? */ + flow_plain_allowed bool + /** Can the scalar be expressed in the block plain style? */ + block_plain_allowed bool + /** Can the scalar be expressed in the single quoted style? */ + single_quoted_allowed bool + /** Can the scalar be expressed in the literal or folded styles? */ + block_allowed bool + /** The output style. */ + style yaml_scalar_style_t + } + + /** + * @} + */ + + /** + * @name Dumper stuff + * @{ + */ + + /** If the stream was already opened? */ + opened bool + /** If the stream was already closed? */ + closed bool + + /** The information associated with the document nodes. */ + anchors *struct { + /** The number of references. */ + references int + /** The anchor id. */ + anchor int + /** If the node has been emitted? */ + serialized bool + } + + /** The last assigned anchor id. */ + last_anchor_id int + + /** The currently emitted document. 
*/ + document *yaml_document_t + + /** + * @} + */ + +} From 997126c647330f1094cd2d26a92931900a14badc Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Mon, 31 Dec 2018 17:18:19 +0100 Subject: [PATCH 10/13] x509 support --- README.md | 257 ++++++++++++++++++++++++++++++++++++- dynaml/call.go | 14 +- dynaml/expression.go | 10 +- dynaml/substr.go | 2 +- dynaml/x509/certificate.go | 226 ++++++++++++++++++++++++++++++++ dynaml/x509/fields.go | 104 +++++++++++++++ dynaml/x509/genkey.go | 78 +++++++++++ dynaml/x509/publickey.go | 45 +++++++ dynaml/x509/utils.go | 249 +++++++++++++++++++++++++++++++++++ flow/flow.go | 2 + 10 files changed, 981 insertions(+), 6 deletions(-) create mode 100644 dynaml/x509/certificate.go create mode 100644 dynaml/x509/fields.go create mode 100644 dynaml/x509/genkey.go create mode 100644 dynaml/x509/publickey.go create mode 100644 dynaml/x509/utils.go diff --git a/README.md b/README.md index 3876c37..adb45ab 100644 --- a/README.md +++ b/README.md @@ -95,6 +95,10 @@ Contents: - [(( makemap(fieldlist) ))](#-makemapfieldlist-) - [(( makemap(key, value) ))](#-makemapkey-value-) - [(( merge(map1, map2) ))](#-mergemap1-map2-) + - [X509 Functions](#x509-functions) + - [(( x509genkey(spec) ))](#-x509genkeyspec-) + - [(( x509publickey(key) ))](#-x509publickeykey-) + - [(( x509cert(spec) ))](#-x509certspec-) - [(( lambda |x|->x ":" port ))](#-lambda-x-x--port-) - [(( &temporary ))](#-temporary-) - [Mappings](#mappings) @@ -1859,7 +1863,7 @@ result: bob: 26 ``` -A map might also be given by a map expression. Here it is possible to specify +A map might also be given by a [map expression](#--alice--25--). Here it is possible to specify dynaml expressions using the usual syntax: e.g.: @@ -1884,6 +1888,189 @@ result: bob: 100 ``` +### X509 Functions + +spiff supports some useful functions to work with _X509_ certificates and keys. +Please refer also to the [Useful to Know](#useful-to-know) section to find some +tips for providing state. 
+
+#### `(( x509genkey(spec) ))`
+
+This function can be used to generate private RSA or ECDSA keys. The result will
+be a PEM encoded key as multi line string value. If a key size (integer or string)
+is given as argument, an RSA key will be generated with the given key size
+(for example 2048). Given one of the string values
+
+- "P224"
+- "P256"
+- "P384"
+- "P521"
+
+the function will generate an appropriate ECDSA key.
+
+e.g.:
+
+```yaml
+keys:
+  key: (( x509genkey(2048) ))
+```
+
+resolves to something like
+
+```yaml
+key: |+
+  -----BEGIN RSA PRIVATE KEY-----
+  MIIEpAIBAAKCAQEAwxdZDfzxqz4hlRwTL060pm1J12mkJlXF0VnqpQjpnRTq0rns
+  CxMxvSfb4crmWg6BRaI1cEN/zmNcT2sO+RZ4jIOZ2Vi8ujqcbzxqyoBQuMNwdb32
+  ...
+  oqMC9QKBgQDEVP7FDuJEnCpzqddiXTC+8NsC+1+2/fk+ypj2qXMxcNiNG1Az95YE
+  gRXbnghNU7RUajILoimAHPItqeeskd69oB77gig4bWwrzkijFXv0dOjDhQlmKY6c
+  pNWsImF7CNhjTP7L27LKk49a+IGutyYLnXmrlarcNYeCQBin1meydA==
+  -----END RSA PRIVATE KEY-----
+```
+
+#### `(( x509publickey(key) ))`
+
+For a given key in PEM format (for example generated with the [x509genkey](#-x509genkeyspec-)
+function) this function extracts the public key and returns it again in PEM format as a
+multi-line string.
+
+e.g.:
+
+```yaml
+keys:
+  key: (( x509genkey(2048) ))
+  public: (( x509publickey(key) ))
+```
+
+resolves to something like
+
+```yaml
+key: |+
+  -----BEGIN RSA PRIVATE KEY-----
+  MIIEpAIBAAKCAQEAwxdZDfzxqz4hlRwTL060pm1J12mkJlXF0VnqpQjpnRTq0rns
+  CxMxvSfb4crmWg6BRaI1cEN/zmNcT2sO+RZ4jIOZ2Vi8ujqcbzxqyoBQuMNwdb32
+  ...
+  oqMC9QKBgQDEVP7FDuJEnCpzqddiXTC+8NsC+1+2/fk+ypj2qXMxcNiNG1Az95YE
+  gRXbnghNU7RUajILoimAHPItqeeskd69oB77gig4bWwrzkijFXv0dOjDhQlmKY6c
+  pNWsImF7CNhjTP7L27LKk49a+IGutyYLnXmrlarcNYeCQBin1meydA==
+  -----END RSA PRIVATE KEY-----
+public: |+
+  -----BEGIN RSA PUBLIC KEY-----
+  MIIBCgKCAQEAwxdZDfzxqz4hlRwTL060pm1J12mkJlXF0VnqpQjpnRTq0rnsCxMx
+  vSfb4crmWg6BRaI1cEN/zmNcT2sO+RZ4jIOZ2Vi8ujqcbzxqyoBQuMNwdb325Bf/
+  ...
+  VzYqyeQyvvRbNe73BXc5temCaQayzsbghkoWK+Wrc33yLsvpeVQBcB93Xhus+Lt1
+  1lxsoIrQf/HBsiu/5Q3M8L6klxeAUcDbYwIDAQAB
+  -----END RSA PUBLIC KEY-----
+```
+
+#### `(( x509cert(spec) ))`
+
+The function `x509cert` creates locally signed certificates, either a self-signed
+one or a certificate signed by a given ca. It returns a PEM encoded certificate
+as a multi-line string value.
+
+The single _spec_ parameter takes a map with some optional and non-optional
+fields used to specify the certificate information. It can be an
+[inline map expression](#--alice--25--) or any map reference into the rest of
+the yaml document.
+
+The following map fields are observed:
+
+| Field Name | Type | Required | Meaning |
+| ------------| ---- | -------- | ------- |
+| `commonName` | string | optional | Common Name field of the subject |
+| `organization` | string or string list | optional | Organization field of the subject |
+| `country` | string or string list | optional | Country field of the subject |
+| `isCA` | bool | optional | CA option of certificate |
+| `usage` | string or string list | required | usage keys for the certificate (see below) |
+| `validity` | integer | optional | validity interval in hours |
+| `validFrom` | string | optional | start time in the format "Jan 1 01:22:31 2019" |
+| `hosts` | string or string list | optional | List of DNS names or IP addresses |
+| `privateKey` | string | required or publicKey | private key to generate the certificate for |
+| `publicKey` | string | required or privateKey| public key to generate the certificate for |
+| `caCert` | string | optional| certificate to sign with |
+| `caPrivateKey` | string | optional| private key for `caCert` |
+
+For self-signed certificates, the `privateKey` field must be set. `publicKey`
+and the `ca` fields should be omitted. If the `caCert` field is given, the
+`caPrivateKey` field is required, also.
If the `privateKey`field is given together with the +`caCert`, the public key for the certificate is extracted from the private key. + +Additional fields are silently ignored. + +The following usage keys are supported (case is ignored): + +| Key | Meaning | +| ------------| ---- | +| `Signature` | x509.KeyUsageDigitalSignature | +| `Commitment` | x509.KeyUsageContentCommitment | +| `KeyEncipherment` | x509.KeyUsageKeyEncipherment | +| `DataEncipherment` | x509.KeyUsageDataEncipherment | +| `KeyAgreement` | x509.KeyUsageKeyAgreement | +| `CertSign` | x509.KeyUsageCertSign | +| `CRLSign` | x509.KeyUsageCRLSign | +| `EncipherOnly` | x509.KeyUsageEncipherOnly | +| `DecipherOnly` | x509.KeyUsageDecipherOnly | +| `Any` | x509.ExtKeyUsageAny | +| `ServerAuth` | x509.ExtKeyUsageServerAuth | +| `ClientAuth` | x509.ExtKeyUsageClientAuth | +| `codesigning` | x509.ExtKeyUsageCodeSigning | +| `EmailProtection` | x509.ExtKeyUsageEmailProtection | +| `IPSecEndSystem` | x509.ExtKeyUsageIPSECEndSystem | +| `IPSecTunnel` | x509.ExtKeyUsageIPSECTunnel | +| `IPSecUser` | x509.ExtKeyUsageIPSECUser | +| `TimeStamping` | x509.ExtKeyUsageTimeStamping | +| `OCSPSigning` | x509.ExtKeyUsageOCSPSigning | +| `MicrosoftServerGatedCrypto` | x509.ExtKeyUsageMicrosoftServerGatedCrypto | +| `NetscapeServerGatedCrypto` | x509.ExtKeyUsageNetscapeServerGatedCrypto | +| `MicrosoftCommercialCodeSigning` | x509.ExtKeyUsageMicrosoftCommercialCodeSigning | +| `MicrosoftKernelCodeSigning` | x509.ExtKeyUsageMicrosoftKernelCodeSigning | + + +e.g.: + +```yaml +spec: + <<: (( &local )) + ca: + organization: Mandelsoft + commonName: Uwe Krueger + privateKey: (( data.cakey )) + isCA: true + usage: + - Signature + - KeyEncipherment + +data: + cakey: (( x509genkey(2048) )) + cacert: (( x509cert(spec.ca) )) +``` + +generates a self-signed root certificate and resolves to something like + +```yaml +cakey: |+ + -----BEGIN RSA PRIVATE KEY----- + MIIEpAIBAAKCAQEAwxdZDfzxqz4hlRwTL060pm1J12mkJlXF0VnqpQjpnRTq0rns + 
CxMxvSfb4crmWg6BRaI1cEN/zmNcT2sO+RZ4jIOZ2Vi8ujqcbzxqyoBQuMNwdb32 + ... + oqMC9QKBgQDEVP7FDuJEnCpzqddiXTC+8NsC+1+2/fk+ypj2qXMxcNiNG1Az95YE + gRXbnghNU7RUajILoimAHPItqeeskd69oB77gig4bWwrzkijFXv0dOjDhQlmKY6c + pNWsImF7CNhjTP7L27LKk49a+IGutyYLnXmrlarcNYeCQBin1meydA== + -----END RSA PRIVATE KEY----- +cacert: |+ + -----BEGIN CERTIFICATE----- + MIIDCjCCAfKgAwIBAgIQb5ex4iGfyCcOa1RvnKSkMDANBgkqhkiG9w0BAQsFADAk + MQ8wDQYDVQQKEwZTQVAgU0UxETAPBgNVBAMTCGdhcmRlbmVyMB4XDTE4MTIzMTE0 + ... + pOUBE3Tgim5rnpa9K9RJ/m8IVqlupcONlxQmP3cCXm/lBEREjODPRNhU11DJwDdJ + 5fd+t5SMEit2BvtTNFXLAwz48EKTxsDPdnHgiQKcbIV8NmgUNPHwXaqRMBLqssKl + Cyvds9xGtAtmZRvYNI0= + -----END CERTIFICATE----- +``` + ## `(( lambda |x|->x ":" port ))` Lambda expressions can be used to define additional anonymous functions. They can be assigned to yaml nodes as values and referenced with path expressions to call the function with approriate arguments in other dynaml expressions. For the final document they are mapped to string values. @@ -3050,6 +3237,74 @@ networks: ... ``` +- _X509_ and providing State + + When generating keys or certificates with the [X509 Functions](#x509-functions) + there will be new keys or certificates for every execution of _spiff_. But + it is also possible to use _spiff_ to maintain key state. A very simple script + could look like this: + + ```bash + #!/bin/bash + DIR="$(dirname "$0")/state" + if [ ! -f "$DIR/state.yaml" ]; then + echo "state:" > "$DIR/state.yaml" + fi + spiff merge "$DIR/template.yaml" "$DIR/state.yaml" > "$DIR/.$$" && mv "$DIR/.$$" "$DIR/state.yaml" + ``` + + It uses a template file (containing the rules) and a state file with the + actual state as stub. The first time it is executed there is an empty state + and the rules are not overridden, therefore the keys and certificates are + generated. Later on, only additional new fields are calculated, the state + fields already containing values just overrule the _dynaml_ expressions + for those fields in the template. 
+ + If a re-generation is required, the state file can just be deleted. + + A template may look like this: + + **state/template.yaml** + ```yaml + spec: + <<: (( &local )) + ca: + organization: Mandelsoft + commonName: rootca + privateKey: (( state.cakey )) + isCA: true + usage: + - Signature + - KeyEncipherment + peer: + organization: Mandelsoft + commonName: etcd + publicKey: (( state.pub )) + caCert: (( state.cacert )) + caPrivateKey: (( state.cakey )) + validity: 100 + usage: + - ServerAuth + - ClientAuth + - KeyEncipherment + hosts: + - etcd.mandelsoft.org + + state: + cakey: (( x509genkey(2048) )) + capub: (( x509publickey(cakey) )) + + cacert: (( x509cert(spec.ca) )) + + key: (( x509genkey(2048) )) + pub: (( x509publickey(key) )) + peer: (( x509cert(spec.peer) )) + + ``` + + The merge then generates a rootca and some TLS certificate signed with + this CA. + # Error Reporting The evaluation of dynaml expressions may fail because of several reasons: diff --git a/dynaml/call.go b/dynaml/call.go index cb8e6d4..0f2d070 100644 --- a/dynaml/call.go +++ b/dynaml/call.go @@ -7,6 +7,14 @@ import ( "github.com/mandelsoft/spiff/debug" ) +type Function func(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) + +var functions = map[string]Function{} + +func RegisterFunction(name string, f Function) { + functions[name] = f +} + type CallExpr struct { Function Expression Arguments []Expression @@ -165,7 +173,11 @@ func (e CallExpr) Evaluate(binding Binding, locally bool) (interface{}, Evaluati } default: - return info.Error("unknown function '%s'", funcName) + f := functions[funcName] + if f == nil { + return info.Error("unknown function '%s'", funcName) + } + result, sub, ok = f(values, binding) } if ok && (result == nil || isExpression(result)) { diff --git a/dynaml/expression.go b/dynaml/expression.go index b7aef21..d89efb7 100644 --- a/dynaml/expression.go +++ b/dynaml/expression.go @@ -61,14 +61,18 @@ type Expression interface { } func (i 
*EvaluationInfo) Error(msgfmt interface{}, args ...interface{}) (interface{}, EvaluationInfo, bool) { - i.LocalError = true - i.Issue = yaml.NewIssue(msgfmt.(string), args...) + i.SetError(msgfmt, args...) return nil, *i, false } func (i *EvaluationInfo) SetError(msgfmt interface{}, args ...interface{}) { i.LocalError = true - i.Issue = yaml.NewIssue(msgfmt.(string), args...) + switch f := msgfmt.(type) { + case string: + i.Issue = yaml.NewIssue(f, args...) + default: + i.Issue = yaml.NewIssue("%s", msgfmt) + } } func (i *EvaluationInfo) PropagateError(value interface{}, state Status, msgfmt string, args ...interface{}) (interface{}, EvaluationInfo, bool) { diff --git a/dynaml/substr.go b/dynaml/substr.go index 1a94131..cd331a4 100644 --- a/dynaml/substr.go +++ b/dynaml/substr.go @@ -15,7 +15,7 @@ func func_substr(arguments []interface{}, binding Binding) (interface{}, Evaluat } start, ok := arguments[1].(int64) if !ok { - return info.Error("second argument for substr must be an intenger") + return info.Error("second argument for substr must be an integer") } if start < 0 { start = int64(len(str)) + start diff --git a/dynaml/x509/certificate.go b/dynaml/x509/certificate.go new file mode 100644 index 0000000..d4b8f4d --- /dev/null +++ b/dynaml/x509/certificate.go @@ -0,0 +1,226 @@ +package x509 + +import ( + "bufio" + "bytes" + "crypto/rand" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "github.com/mandelsoft/spiff/yaml" + "math/big" + "net" + "time" + + . 
"github.com/mandelsoft/spiff/dynaml" +) + +const F_Cert = "x509cert" + +func init() { + RegisterFunction(F_Cert, func_x509cert) +} + +// one map argument with fields +// usage: []string +// organization: []string +// country: []string (optional) +// commonName: string (optional) +// validFrom : string/date (optional) +// validity : int (hours, optional) +// isCA: boolean (optional) +// hosts: []string (optional) +// privateKey: string +// publicKey: string +// +// caCert: string (optional) +// caPrivateKey: string (optional) +// + +func func_x509cert(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + var err error + info := DefaultInfo() + + if len(arguments) != 1 { + return info.Error("invalid argument count for %s()", F_Cert) + } + fields, ok := arguments[0].(map[string]yaml.Node) + if !ok { + return info.Error("argument for %s must be a map (found %T)", F_Cert, arguments[0]) + } + + isCA, err := getDefaultedBoolField(fields, "isCA", false) + if err != nil { + return info.Error(err) + } + + orgs, err := getStringListField(fields, "organization") + if err != nil { + return info.Error(err) + } + + cn, err := getDefaultedStringField(fields, "commonName", "") + if err != nil { + return info.Error(err) + } + + countries, err := getDefaultedStringListField(fields, "country", nil) + if err != nil { + return info.Error(err) + } + + usages, err := getStringListField(fields, "usage") + if err != nil { + return info.Error(err) + } + + validity, err := getDefaultedIntField(fields, "validity", 24*365) + if err != nil { + return info.Error(err) + } + + hosts, err := getDefaultedStringListField(fields, "hosts", nil) + if err != nil { + return info.Error(err) + } + + privKey, err := getDefaultedStringField(fields, "privateKey", "") + if err != nil { + return info.Error(err) + } + var priv interface{} + if privKey != "" { + priv, err = ParsePrivateKey(privKey) + if err != nil { + return info.Error(err) + } + } + + pubKey, err := 
getDefaultedStringField(fields, "publicKey", "") + if err != nil { + return info.Error(err) + } + var pub interface{} + if pubKey != "" { + pub, err = ParsePublicKey(pubKey) + if err != nil { + return info.Error(err) + } + } + + if pub == nil { + if priv == nil { + return info.Error("one of 'publicKey' or 'privKey' must be given") + } + pub = publicKey(priv) + } + + caCert, err := getDefaultedStringField(fields, "caCert", "") + if err != nil { + return info.Error(err) + } + var ca *x509.Certificate + if caCert != "" { + ca, err = ParseCertificate(caCert) + if err != nil { + return info.Error("invalid ca certificate: %s", err) + } + } + + caPrivateKey, err := getDefaultedStringField(fields, "caPrivateKey", "") + if err != nil { + return info.Error(err) + } + var capriv = priv + if caPrivateKey != "" { + if ca != nil { + capriv, err = ParsePrivateKey(caPrivateKey) + if err != nil { + return info.Error(err) + } + } + } else { + if ca != nil { + return info.Error("private key for ca required") + } + } + if capriv == nil { + return info.Error("private key for self-signed certificate required") + } + + var notBefore time.Time + validFrom, err := getDefaultedStringField(fields, "validFrom", "") + if err != nil { + return info.Error(err) + } + if validFrom == "" { + notBefore = time.Now() + } else { + notBefore, err = time.Parse("Jan 2 15:04:05 2006", validFrom) + if err != nil { + return info.Error("invalid validFrom fields: %s", err) + } + } + + notAfter := notBefore.Add(time.Duration(validity) * time.Hour) + + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return info.Error("failed to generate serial number: %s", err) + } + + template := &x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + Organization: orgs, + CommonName: cn, + Country: countries, + }, + NotBefore: notBefore, + NotAfter: notAfter, + + KeyUsage: x509.KeyUsageKeyEncipherment | 
x509.KeyUsageDigitalSignature, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, + BasicConstraintsValid: true, + } + + if ca == nil { + ca = template + } + + for _, u := range usages { + k := ParseKeyUsage(u) + if k == nil { + return info.Error("invalid usage key %q", u) + } + k.AddTo(template) + } + + for _, h := range hosts { + if ip := net.ParseIP(h); ip != nil { + template.IPAddresses = append(template.IPAddresses, ip) + } else { + template.DNSNames = append(template.DNSNames, h) + } + } + + if isCA || (template.KeyUsage&x509.KeyUsageCertSign) != 0 { + template.IsCA = true + template.KeyUsage |= x509.KeyUsageCertSign + } + + derBytes, err := x509.CreateCertificate(rand.Reader, template, ca, pub, capriv) + if err != nil { + return info.Error("Failed to create certificate: %s", err) + } + + var b bytes.Buffer + writer := bufio.NewWriter(&b) + + if err := pem.Encode(writer, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}); err != nil { + return info.Error("failed to write certificate pem block: %s", err) + } + writer.Flush() + return b.String(), info, true +} diff --git a/dynaml/x509/fields.go b/dynaml/x509/fields.go new file mode 100644 index 0000000..5f75d5c --- /dev/null +++ b/dynaml/x509/fields.go @@ -0,0 +1,104 @@ +package x509 + +import ( + "fmt" + "github.com/mandelsoft/spiff/yaml" + "strconv" +) + +func getField(fields map[string]yaml.Node, name string) interface{} { + field := fields[name] + if field == nil { + return nil + } + return field.Value() +} + +func getDefaultedBoolField(fields map[string]yaml.Node, name string, def bool) (bool, error) { + v := getField(fields, name) + if v == nil { + return def, nil + } + switch b := v.(type) { + case bool: + return b, nil + case string: + return strconv.ParseBool(b) + case int64: + return b != 0, nil + default: + return def, fmt.Errorf("invalid type for boolean field %q", name) + } +} + +func getDefaultedStringField(fields map[string]yaml.Node, name string, def string) (string, error) { + v := 
getField(fields, name) + if v == nil { + return def, nil + } + switch f := v.(type) { + case string: + return f, nil + case bool: + return strconv.FormatBool(f), nil + case int64: + return strconv.FormatInt(f, 10), nil + default: + return "", fmt.Errorf("invalid type for %q", name) + } +} + +func getDefaultedIntField(fields map[string]yaml.Node, name string, def int64) (int64, error) { + v := getField(fields, name) + if v == nil { + return def, nil + } + switch f := v.(type) { + case string: + return strconv.ParseInt(f, 10, 64) + case int64: + return f, nil + default: + return 0, fmt.Errorf("invalid type for %q", name) + } +} + +func getDefaultedStringListField(fields map[string]yaml.Node, name string, def []string) ([]string, error) { + v := getField(fields, name) + if v == nil { + return def, nil + } + switch f := v.(type) { + case string: + return []string{f}, nil + case bool: + return []string{strconv.FormatBool(f)}, nil + case int64: + return []string{strconv.FormatInt(f, 10)}, nil + case []yaml.Node: + r := make([]string, len(f)) + for i, e := range f { + switch ev := e.Value().(type) { + case string: + r[i] = ev + case bool: + r[i] = strconv.FormatBool(ev) + case int64: + r[i] = strconv.FormatInt(ev, 10) + default: + return nil, fmt.Errorf("invalid list element type for %q", name) + } + } + return r, nil + default: + return nil, fmt.Errorf("invalid type for %q", name) + } +} + +func getStringListField(fields map[string]yaml.Node, name string) ([]string, error) { + l, err := getDefaultedStringListField(fields, name, nil) + if l == nil && err != nil { + return nil, fmt.Errorf("field %q is required", name) + } + return l, err +} diff --git a/dynaml/x509/genkey.go b/dynaml/x509/genkey.go new file mode 100644 index 0000000..0c12843 --- /dev/null +++ b/dynaml/x509/genkey.go @@ -0,0 +1,78 @@ +package x509 + +import ( + "bufio" + "bytes" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" + "encoding/pem" + "strconv" + + . 
"github.com/mandelsoft/spiff/dynaml" +) + +const F_GenKey = "x509genkey" + +func init() { + RegisterFunction(F_GenKey, func_x509genkey) +} + +// one optional argument +// - either rsa bit size (int) +// - or ecdsaCurve (string) + +func func_x509genkey(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + var err error + var ok bool + info := DefaultInfo() + ecdsaCurve := "" + rsaBits := int64(2048) + + if len(arguments) > 1 { + return info.Error("invalid argument count for %s([|])", F_GenKey) + } + + if len(arguments) > 0 { + rsaBits, ok = arguments[0].(int64) + if !ok { + str, ok := arguments[0].(string) + if !ok { + return info.Error("argument for %s must be a string or integer", F_GenKey) + } + rsaBits, err = strconv.ParseInt(str, 10, 32) + if err != nil { + ecdsaCurve = str + } + } + } + + var priv interface{} + switch ecdsaCurve { + case "": + priv, err = rsa.GenerateKey(rand.Reader, int(rsaBits)) + case "P224": + priv, err = ecdsa.GenerateKey(elliptic.P224(), rand.Reader) + case "P256": + priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + case "P384": + priv, err = ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + case "P521": + priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + default: + return info.Error("Unrecognized elliptic curve: %q", ecdsaCurve) + } + if err != nil { + return info.Error("failed to generate private key: %s", err) + } + + var b bytes.Buffer + writer := bufio.NewWriter(&b) + + if err := pem.Encode(writer, pemBlockForKey(priv)); err != nil { + return info.Error("failed to write key pem block: %s", err) + } + writer.Flush() + return b.String(), info, true +} diff --git a/dynaml/x509/publickey.go b/dynaml/x509/publickey.go new file mode 100644 index 0000000..faffcae --- /dev/null +++ b/dynaml/x509/publickey.go @@ -0,0 +1,45 @@ +package x509 + +import ( + "bufio" + "bytes" + "encoding/pem" + . 
"github.com/mandelsoft/spiff/dynaml" +) + +const F_PublicKey = "x509publickey" + +func init() { + RegisterFunction(F_PublicKey, func_x509publickey) +} + +// one argument +// - private key pem + +func func_x509publickey(arguments []interface{}, binding Binding) (interface{}, EvaluationInfo, bool) { + var err error + info := DefaultInfo() + + if len(arguments) != 1 { + return info.Error("invalid argument count for %s()", F_PublicKey) + } + + str, ok := arguments[0].(string) + if !ok { + return info.Error("argument for %s must be a private key in pem format", F_PublicKey) + } + + key, err := ParsePrivateKey(str) + if err != nil { + return info.Error("argument for %s must be a private key in pem format: %s", F_PublicKey, err) + } + + var b bytes.Buffer + writer := bufio.NewWriter(&b) + + if err := pem.Encode(writer, pemBlockForPublicKey(publicKey(key))); err != nil { + return info.Error("failed to write public key pem block: %s", err) + } + writer.Flush() + return b.String(), info, true +} diff --git a/dynaml/x509/utils.go b/dynaml/x509/utils.go new file mode 100644 index 0000000..97e4879 --- /dev/null +++ b/dynaml/x509/utils.go @@ -0,0 +1,249 @@ +package x509 + +import ( + "crypto/dsa" + "crypto/ecdsa" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "fmt" + "log" + "os" + "strings" +) + +func privateKey(block *pem.Block) (interface{}, error) { + x509Encoded := block.Bytes + switch block.Type { + case "RSA PRIVATE KEY": + return x509.ParsePKCS1PrivateKey(x509Encoded) + case "EC PRIVATE KEY": + return x509.ParseECPrivateKey(x509Encoded) + default: + return nil, fmt.Errorf("invalid pem block type %q", block.Type) + } +} + +func publicKey(priv interface{}) interface{} { + switch k := priv.(type) { + case *rsa.PrivateKey: + return &k.PublicKey + case *ecdsa.PrivateKey: + return &k.PublicKey + default: + return nil + } +} + +func pemBlockForKey(priv interface{}) *pem.Block { + switch k := priv.(type) { + case *rsa.PrivateKey: + return &pem.Block{Type: "RSA PRIVATE KEY", 
Bytes: x509.MarshalPKCS1PrivateKey(k)} + case *ecdsa.PrivateKey: + b, err := x509.MarshalECPrivateKey(k) + if err != nil { + fmt.Fprintf(os.Stderr, "Unable to marshal ECDSA private key: %v", err) + os.Exit(2) + } + return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b} + default: + log.Fatal("invalid key") + return nil + } +} + +func pemBlockForPublicKey(priv interface{}) *pem.Block { + switch k := priv.(type) { + case *rsa.PublicKey: + return &pem.Block{Type: "RSA PUBLIC KEY", Bytes: x509.MarshalPKCS1PublicKey(k)} + case *ecdsa.PublicKey: + b, err := x509.MarshalPKIXPublicKey(k) + if err != nil { + return nil + } + return &pem.Block{Type: "ECDSA PUBLIC KEY", Bytes: b} + default: + return nil + } +} + +func ParsePublicKey(data string) (interface{}, error) { + block, _ := pem.Decode([]byte(data)) + if block == nil { + return nil, fmt.Errorf("invalid private key format (expected pem block)") + } + pub, err := x509.ParsePKIXPublicKey(block.Bytes) + if err != nil { + pub, err = x509.ParsePKCS1PublicKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse DER encoded public key: %s", err) + } + return pub, nil + } + switch pub := pub.(type) { + case *rsa.PublicKey: + return pub, nil + case *dsa.PublicKey: + return pub, nil + case *ecdsa.PublicKey: + return pub, nil + default: + return nil, fmt.Errorf("unknown type of public key") + } +} + +func ParsePrivateKey(data string) (interface{}, error) { + block, _ := pem.Decode([]byte(data)) + if block == nil { + return nil, fmt.Errorf("invalid private key format (expected pem block)") + } + return privateKey(block) +} + +func ParseCertificate(data string) (*x509.Certificate, error) { + block, _ := pem.Decode([]byte(data)) + if block == nil { + return nil, fmt.Errorf("invalid certificate format (expected pem block)") + } + if block.Type != "CERTIFICATE" { + return nil, fmt.Errorf("unexpected pem block type for certificate: %q", block.Type) + } + return x509.ParseCertificate(block.Bytes) +} + 
+//////////////////////////////////////////////////////////////////////////////// + +type KeyUsage interface { + String() string + AddTo(*x509.Certificate) +} + +type _keyUsage x509.KeyUsage + +func (this _keyUsage) AddTo(cert *x509.Certificate) { + cert.KeyUsage = cert.KeyUsage | x509.KeyUsage(this) +} + +func (this _keyUsage) String() string { + switch x509.KeyUsage(this) { + case x509.KeyUsageDigitalSignature: + return "Signature" + case x509.KeyUsageContentCommitment: + return "ContentCommitment" + case x509.KeyUsageKeyEncipherment: + return "KeyEncipherment" + case x509.KeyUsageDataEncipherment: + return "DataEncipherment" + case x509.KeyUsageKeyAgreement: + return "KeyAgreement" + case x509.KeyUsageCertSign: + return "CertSign" + case x509.KeyUsageCRLSign: + return "CRLSign" + case x509.KeyUsageEncipherOnly: + return "EncipherOnly" + case x509.KeyUsageDecipherOnly: + return "DecipherOnly" + default: + return "UnknownKeyUsage" + } +} + +type _extKeyUsage x509.ExtKeyUsage + +func (this _extKeyUsage) AddTo(cert *x509.Certificate) { + for _, k := range cert.ExtKeyUsage { + if k == x509.ExtKeyUsage(this) { + return + } + } + cert.ExtKeyUsage = append(cert.ExtKeyUsage, x509.ExtKeyUsage(this)) +} + +func (this _extKeyUsage) String() string { + switch x509.ExtKeyUsage(this) { + case x509.ExtKeyUsageAny: + return "Any" + case x509.ExtKeyUsageServerAuth: + return "ServerAuth" + case x509.ExtKeyUsageClientAuth: + return "ClientAuth" + case x509.ExtKeyUsageCodeSigning: + return "CodeSigning" + case x509.ExtKeyUsageEmailProtection: + return "EmailProtection" + case x509.ExtKeyUsageIPSECEndSystem: + return "IPSECEndSystem" + case x509.ExtKeyUsageIPSECTunnel: + return "IPSECTunnel" + case x509.ExtKeyUsageIPSECUser: + return "IPSECUser" + case x509.ExtKeyUsageTimeStamping: + return "TimeStamping" + case x509.ExtKeyUsageOCSPSigning: + return "OCSPSigning" + case x509.ExtKeyUsageMicrosoftServerGatedCrypto: + return "MicrosoftServerGatedCrypto" + case 
x509.ExtKeyUsageNetscapeServerGatedCrypto: + return "NetscapeServerGatedCrypto" + case x509.ExtKeyUsageMicrosoftCommercialCodeSigning: + return "MicrosoftCommercialCodeSigning" + case x509.ExtKeyUsageMicrosoftKernelCodeSigning: + return "MicrosoftKernelCodeSigning" + default: + return "UnknownExtKeyUsage" + } +} + +func ParseKeyUsage(name string) KeyUsage { + switch strings.ToLower(name) { + case "signature": + return _keyUsage(x509.KeyUsageDigitalSignature) + case "commitment": + return _keyUsage(x509.KeyUsageContentCommitment) + case "keyencipherment": + return _keyUsage(x509.KeyUsageKeyEncipherment) + case "dataencipherment": + return _keyUsage(x509.KeyUsageDataEncipherment) + case "keyagreement": + return _keyUsage(x509.KeyUsageKeyAgreement) + case "certsign": + return _keyUsage(x509.KeyUsageCertSign) + case "crlsign": + return _keyUsage(x509.KeyUsageCRLSign) + case "encipheronly": + return _keyUsage(x509.KeyUsageEncipherOnly) + case "decipheronly": + return _keyUsage(x509.KeyUsageDecipherOnly) + + case "any": + return _extKeyUsage(x509.ExtKeyUsageAny) + case "serverauth": + return _extKeyUsage(x509.ExtKeyUsageServerAuth) + case "clientauth": + return _extKeyUsage(x509.ExtKeyUsageClientAuth) + case "codesigning": + return _extKeyUsage(x509.ExtKeyUsageCodeSigning) + case "emailprotection": + return _extKeyUsage(x509.ExtKeyUsageEmailProtection) + case "ipsecendsystem": + return _extKeyUsage(x509.ExtKeyUsageIPSECEndSystem) + case "ipsectunnel": + return _extKeyUsage(x509.ExtKeyUsageIPSECTunnel) + case "ipsecuser": + return _extKeyUsage(x509.ExtKeyUsageIPSECUser) + case "timestamping": + return _extKeyUsage(x509.ExtKeyUsageTimeStamping) + case "ocspsigning": + return _extKeyUsage(x509.ExtKeyUsageOCSPSigning) + case "microsoftservergatedcrypto": + return _extKeyUsage(x509.ExtKeyUsageMicrosoftServerGatedCrypto) + case "netscapeservergatedcrypto": + return _extKeyUsage(x509.ExtKeyUsageNetscapeServerGatedCrypto) + case "microsoftcommercialcodesigning": + return 
_extKeyUsage(x509.ExtKeyUsageMicrosoftCommercialCodeSigning) + case "microsoftkernelcodesigning": + return _extKeyUsage(x509.ExtKeyUsageMicrosoftKernelCodeSigning) + } + return nil +} diff --git a/flow/flow.go b/flow/flow.go index cc1a26d..ee73fd7 100644 --- a/flow/flow.go +++ b/flow/flow.go @@ -8,6 +8,8 @@ import ( "github.com/mandelsoft/spiff/debug" "github.com/mandelsoft/spiff/dynaml" "github.com/mandelsoft/spiff/yaml" + + _ "github.com/mandelsoft/spiff/dynaml/x509" ) func Flow(source yaml.Node, stubs ...yaml.Node) (yaml.Node, error) { From 1d3660827b829aba3eddc3e2565140d78d42d412 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Mon, 31 Dec 2018 18:37:32 +0100 Subject: [PATCH 11/13] improve expression list parsing (support empty list) --- dynaml/dynaml.peg | 11 +- dynaml/dynaml.peg.go | 1551 +++++++++++++++++++++-------------------- dynaml/parser.go | 40 +- dynaml/parser_test.go | 10 + 4 files changed, 812 insertions(+), 800 deletions(-) diff --git a/dynaml/dynaml.peg b/dynaml/dynaml.peg index d7ab513..83f8428 100644 --- a/dynaml/dynaml.peg +++ b/dynaml/dynaml.peg @@ -46,9 +46,10 @@ ChainedQualifiedExpression <- ChainedCall / ( '.' ( ChainedRef / ChainedDynRef / ChainedRef <- ( Key / Index ) FollowUpRef ChainedDynRef <- '[' Expression ']' Slice <- Range -ChainedCall <- '(' Arguments ')' -Arguments <- Expression (NextExpression)* -NextExpression <- ',' Expression +ChainedCall <- StartArguments ExpressionList? ')' +StartArguments <- '(' ws +ExpressionList <- NextExpression ( ',' NextExpression)* +NextExpression <- Expression Projection <- ( '[*]' / Slice ) ProjectionValue ChainedQualifiedExpression* ProjectionValue <- {} @@ -64,8 +65,8 @@ Boolean <- 'true' / 'false' Nil <- 'nil' / '~' Undefined <- '~~' -List <- '[' Contents? ']' -Contents <- Expression (NextExpression)* +List <- StartList ExpressionList? ']' +StartList <- '[' Map <- CreateMap ws Assignments? 
'}' CreateMap <- '{' diff --git a/dynaml/dynaml.peg.go b/dynaml/dynaml.peg.go index 4e460e0..89daf68 100644 --- a/dynaml/dynaml.peg.go +++ b/dynaml/dynaml.peg.go @@ -47,7 +47,8 @@ const ( ruleChainedDynRef ruleSlice ruleChainedCall - ruleArguments + ruleStartArguments + ruleExpressionList ruleNextExpression ruleProjection ruleProjectionValue @@ -61,7 +62,7 @@ const ( ruleNil ruleUndefined ruleList - ruleContents + ruleStartList ruleMap ruleCreateMap ruleAssignments @@ -129,7 +130,8 @@ var rul3s = [...]string{ "ChainedDynRef", "Slice", "ChainedCall", - "Arguments", + "StartArguments", + "ExpressionList", "NextExpression", "Projection", "ProjectionValue", @@ -143,7 +145,7 @@ var rul3s = [...]string{ "Nil", "Undefined", "List", - "Contents", + "StartList", "Map", "CreateMap", "Assignments", @@ -488,7 +490,7 @@ func (t *tokens32) Expand(index int) tokenTree { type DynamlGrammar struct { Buffer string buffer []rune - rules [75]func() bool + rules [76]func() bool Parse func(rule ...int) error Reset func() Pretty bool @@ -1847,19 +1849,25 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position131, tokenIndex131, depth131 return false }, - /* 32 ChainedCall <- <('(' Arguments ')')> */ + /* 32 ChainedCall <- <(StartArguments ExpressionList? 
')')> */ func() bool { position133, tokenIndex133, depth133 := position, tokenIndex, depth { position134 := position depth++ - if buffer[position] != rune('(') { + if !_rules[ruleStartArguments]() { goto l133 } - position++ - if !_rules[ruleArguments]() { - goto l133 + { + position135, tokenIndex135, depth135 := position, tokenIndex, depth + if !_rules[ruleExpressionList]() { + goto l135 + } + goto l136 + l135: + position, tokenIndex, depth = position135, tokenIndex135, depth135 } + l136: if buffer[position] != rune(')') { goto l133 } @@ -1872,1585 +1880,1577 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position133, tokenIndex133, depth133 return false }, - /* 33 Arguments <- <(Expression NextExpression*)> */ + /* 33 StartArguments <- <('(' ws)> */ func() bool { - position135, tokenIndex135, depth135 := position, tokenIndex, depth + position137, tokenIndex137, depth137 := position, tokenIndex, depth { - position136 := position + position138 := position depth++ - if !_rules[ruleExpression]() { - goto l135 + if buffer[position] != rune('(') { + goto l137 } - l137: - { - position138, tokenIndex138, depth138 := position, tokenIndex, depth - if !_rules[ruleNextExpression]() { - goto l138 - } + position++ + if !_rules[rulews]() { goto l137 - l138: - position, tokenIndex, depth = position138, tokenIndex138, depth138 } depth-- - add(ruleArguments, position136) + add(ruleStartArguments, position138) } return true - l135: - position, tokenIndex, depth = position135, tokenIndex135, depth135 + l137: + position, tokenIndex, depth = position137, tokenIndex137, depth137 return false }, - /* 34 NextExpression <- <(',' Expression)> */ + /* 34 ExpressionList <- <(NextExpression (',' NextExpression)*)> */ func() bool { position139, tokenIndex139, depth139 := position, tokenIndex, depth { position140 := position depth++ - if buffer[position] != rune(',') { + if !_rules[ruleNextExpression]() { goto l139 } - position++ - if !_rules[ruleExpression]() { - goto l139 + 
l141: + { + position142, tokenIndex142, depth142 := position, tokenIndex, depth + if buffer[position] != rune(',') { + goto l142 + } + position++ + if !_rules[ruleNextExpression]() { + goto l142 + } + goto l141 + l142: + position, tokenIndex, depth = position142, tokenIndex142, depth142 } depth-- - add(ruleNextExpression, position140) + add(ruleExpressionList, position140) } return true l139: position, tokenIndex, depth = position139, tokenIndex139, depth139 return false }, - /* 35 Projection <- <((('[' '*' ']') / Slice) ProjectionValue ChainedQualifiedExpression*)> */ + /* 35 NextExpression <- */ + func() bool { + position143, tokenIndex143, depth143 := position, tokenIndex, depth + { + position144 := position + depth++ + if !_rules[ruleExpression]() { + goto l143 + } + depth-- + add(ruleNextExpression, position144) + } + return true + l143: + position, tokenIndex, depth = position143, tokenIndex143, depth143 + return false + }, + /* 36 Projection <- <((('[' '*' ']') / Slice) ProjectionValue ChainedQualifiedExpression*)> */ func() bool { - position141, tokenIndex141, depth141 := position, tokenIndex, depth + position145, tokenIndex145, depth145 := position, tokenIndex, depth { - position142 := position + position146 := position depth++ { - position143, tokenIndex143, depth143 := position, tokenIndex, depth + position147, tokenIndex147, depth147 := position, tokenIndex, depth if buffer[position] != rune('[') { - goto l144 + goto l148 } position++ if buffer[position] != rune('*') { - goto l144 + goto l148 } position++ if buffer[position] != rune(']') { - goto l144 + goto l148 } position++ - goto l143 - l144: - position, tokenIndex, depth = position143, tokenIndex143, depth143 + goto l147 + l148: + position, tokenIndex, depth = position147, tokenIndex147, depth147 if !_rules[ruleSlice]() { - goto l141 + goto l145 } } - l143: + l147: if !_rules[ruleProjectionValue]() { - goto l141 + goto l145 } - l145: + l149: { - position146, tokenIndex146, depth146 := position, 
tokenIndex, depth + position150, tokenIndex150, depth150 := position, tokenIndex, depth if !_rules[ruleChainedQualifiedExpression]() { - goto l146 + goto l150 } - goto l145 - l146: - position, tokenIndex, depth = position146, tokenIndex146, depth146 + goto l149 + l150: + position, tokenIndex, depth = position150, tokenIndex150, depth150 } depth-- - add(ruleProjection, position142) + add(ruleProjection, position146) } return true - l141: - position, tokenIndex, depth = position141, tokenIndex141, depth141 + l145: + position, tokenIndex, depth = position145, tokenIndex145, depth145 return false }, - /* 36 ProjectionValue <- */ + /* 37 ProjectionValue <- */ func() bool { - position147, tokenIndex147, depth147 := position, tokenIndex, depth + position151, tokenIndex151, depth151 := position, tokenIndex, depth { - position148 := position + position152 := position depth++ if !_rules[ruleAction0]() { - goto l147 + goto l151 } depth-- - add(ruleProjectionValue, position148) + add(ruleProjectionValue, position152) } return true - l147: - position, tokenIndex, depth = position147, tokenIndex147, depth147 + l151: + position, tokenIndex, depth = position151, tokenIndex151, depth151 return false }, - /* 37 Substitution <- <('*' Level0)> */ + /* 38 Substitution <- <('*' Level0)> */ func() bool { - position149, tokenIndex149, depth149 := position, tokenIndex, depth + position153, tokenIndex153, depth153 := position, tokenIndex, depth { - position150 := position + position154 := position depth++ if buffer[position] != rune('*') { - goto l149 + goto l153 } position++ if !_rules[ruleLevel0]() { - goto l149 + goto l153 } depth-- - add(ruleSubstitution, position150) + add(ruleSubstitution, position154) } return true - l149: - position, tokenIndex, depth = position149, tokenIndex149, depth149 + l153: + position, tokenIndex, depth = position153, tokenIndex153, depth153 return false }, - /* 38 Not <- <('!' ws Level0)> */ + /* 39 Not <- <('!' 
ws Level0)> */ func() bool { - position151, tokenIndex151, depth151 := position, tokenIndex, depth + position155, tokenIndex155, depth155 := position, tokenIndex, depth { - position152 := position + position156 := position depth++ if buffer[position] != rune('!') { - goto l151 + goto l155 } position++ if !_rules[rulews]() { - goto l151 + goto l155 } if !_rules[ruleLevel0]() { - goto l151 + goto l155 } depth-- - add(ruleNot, position152) + add(ruleNot, position156) } return true - l151: - position, tokenIndex, depth = position151, tokenIndex151, depth151 + l155: + position, tokenIndex, depth = position155, tokenIndex155, depth155 return false }, - /* 39 Grouped <- <('(' Expression ')')> */ + /* 40 Grouped <- <('(' Expression ')')> */ func() bool { - position153, tokenIndex153, depth153 := position, tokenIndex, depth + position157, tokenIndex157, depth157 := position, tokenIndex, depth { - position154 := position + position158 := position depth++ if buffer[position] != rune('(') { - goto l153 + goto l157 } position++ if !_rules[ruleExpression]() { - goto l153 + goto l157 } if buffer[position] != rune(')') { - goto l153 + goto l157 } position++ depth-- - add(ruleGrouped, position154) + add(ruleGrouped, position158) } return true - l153: - position, tokenIndex, depth = position153, tokenIndex153, depth153 + l157: + position, tokenIndex, depth = position157, tokenIndex157, depth157 return false }, - /* 40 Range <- <('[' Expression ('.' '.') Expression ']')> */ + /* 41 Range <- <('[' Expression ('.' 
'.') Expression ']')> */ func() bool { - position155, tokenIndex155, depth155 := position, tokenIndex, depth + position159, tokenIndex159, depth159 := position, tokenIndex, depth { - position156 := position + position160 := position depth++ if buffer[position] != rune('[') { - goto l155 + goto l159 } position++ if !_rules[ruleExpression]() { - goto l155 + goto l159 } if buffer[position] != rune('.') { - goto l155 + goto l159 } position++ if buffer[position] != rune('.') { - goto l155 + goto l159 } position++ if !_rules[ruleExpression]() { - goto l155 + goto l159 } if buffer[position] != rune(']') { - goto l155 + goto l159 } position++ depth-- - add(ruleRange, position156) + add(ruleRange, position160) } return true - l155: - position, tokenIndex, depth = position155, tokenIndex155, depth155 + l159: + position, tokenIndex, depth = position159, tokenIndex159, depth159 return false }, - /* 41 Integer <- <('-'? [0-9] ([0-9] / '_')*)> */ + /* 42 Integer <- <('-'? [0-9] ([0-9] / '_')*)> */ func() bool { - position157, tokenIndex157, depth157 := position, tokenIndex, depth + position161, tokenIndex161, depth161 := position, tokenIndex, depth { - position158 := position + position162 := position depth++ { - position159, tokenIndex159, depth159 := position, tokenIndex, depth + position163, tokenIndex163, depth163 := position, tokenIndex, depth if buffer[position] != rune('-') { - goto l159 + goto l163 } position++ - goto l160 - l159: - position, tokenIndex, depth = position159, tokenIndex159, depth159 + goto l164 + l163: + position, tokenIndex, depth = position163, tokenIndex163, depth163 } - l160: + l164: if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l157 + goto l161 } position++ - l161: + l165: { - position162, tokenIndex162, depth162 := position, tokenIndex, depth + position166, tokenIndex166, depth166 := position, tokenIndex, depth { - position163, tokenIndex163, depth163 := position, tokenIndex, depth + position167, tokenIndex167, depth167 := 
position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l164 + goto l168 } position++ - goto l163 - l164: - position, tokenIndex, depth = position163, tokenIndex163, depth163 + goto l167 + l168: + position, tokenIndex, depth = position167, tokenIndex167, depth167 if buffer[position] != rune('_') { - goto l162 + goto l166 } position++ } - l163: - goto l161 - l162: - position, tokenIndex, depth = position162, tokenIndex162, depth162 + l167: + goto l165 + l166: + position, tokenIndex, depth = position166, tokenIndex166, depth166 } depth-- - add(ruleInteger, position158) + add(ruleInteger, position162) } return true - l157: - position, tokenIndex, depth = position157, tokenIndex157, depth157 + l161: + position, tokenIndex, depth = position161, tokenIndex161, depth161 return false }, - /* 42 String <- <('"' (('\\' '"') / (!'"' .))* '"')> */ + /* 43 String <- <('"' (('\\' '"') / (!'"' .))* '"')> */ func() bool { - position165, tokenIndex165, depth165 := position, tokenIndex, depth + position169, tokenIndex169, depth169 := position, tokenIndex, depth { - position166 := position + position170 := position depth++ if buffer[position] != rune('"') { - goto l165 + goto l169 } position++ - l167: + l171: { - position168, tokenIndex168, depth168 := position, tokenIndex, depth + position172, tokenIndex172, depth172 := position, tokenIndex, depth { - position169, tokenIndex169, depth169 := position, tokenIndex, depth + position173, tokenIndex173, depth173 := position, tokenIndex, depth if buffer[position] != rune('\\') { - goto l170 + goto l174 } position++ if buffer[position] != rune('"') { - goto l170 + goto l174 } position++ - goto l169 - l170: - position, tokenIndex, depth = position169, tokenIndex169, depth169 + goto l173 + l174: + position, tokenIndex, depth = position173, tokenIndex173, depth173 { - position171, tokenIndex171, depth171 := position, tokenIndex, depth + position175, tokenIndex175, depth175 := position, tokenIndex, depth if 
buffer[position] != rune('"') { - goto l171 + goto l175 } position++ - goto l168 - l171: - position, tokenIndex, depth = position171, tokenIndex171, depth171 + goto l172 + l175: + position, tokenIndex, depth = position175, tokenIndex175, depth175 } if !matchDot() { - goto l168 + goto l172 } } - l169: - goto l167 - l168: - position, tokenIndex, depth = position168, tokenIndex168, depth168 + l173: + goto l171 + l172: + position, tokenIndex, depth = position172, tokenIndex172, depth172 } if buffer[position] != rune('"') { - goto l165 + goto l169 } position++ depth-- - add(ruleString, position166) + add(ruleString, position170) } return true - l165: - position, tokenIndex, depth = position165, tokenIndex165, depth165 + l169: + position, tokenIndex, depth = position169, tokenIndex169, depth169 return false }, - /* 43 Boolean <- <(('t' 'r' 'u' 'e') / ('f' 'a' 'l' 's' 'e'))> */ + /* 44 Boolean <- <(('t' 'r' 'u' 'e') / ('f' 'a' 'l' 's' 'e'))> */ func() bool { - position172, tokenIndex172, depth172 := position, tokenIndex, depth + position176, tokenIndex176, depth176 := position, tokenIndex, depth { - position173 := position + position177 := position depth++ { - position174, tokenIndex174, depth174 := position, tokenIndex, depth + position178, tokenIndex178, depth178 := position, tokenIndex, depth if buffer[position] != rune('t') { - goto l175 + goto l179 } position++ if buffer[position] != rune('r') { - goto l175 + goto l179 } position++ if buffer[position] != rune('u') { - goto l175 + goto l179 } position++ if buffer[position] != rune('e') { - goto l175 + goto l179 } position++ - goto l174 - l175: - position, tokenIndex, depth = position174, tokenIndex174, depth174 + goto l178 + l179: + position, tokenIndex, depth = position178, tokenIndex178, depth178 if buffer[position] != rune('f') { - goto l172 + goto l176 } position++ if buffer[position] != rune('a') { - goto l172 + goto l176 } position++ if buffer[position] != rune('l') { - goto l172 + goto l176 } position++ if 
buffer[position] != rune('s') { - goto l172 + goto l176 } position++ if buffer[position] != rune('e') { - goto l172 + goto l176 } position++ } - l174: + l178: depth-- - add(ruleBoolean, position173) + add(ruleBoolean, position177) } return true - l172: - position, tokenIndex, depth = position172, tokenIndex172, depth172 + l176: + position, tokenIndex, depth = position176, tokenIndex176, depth176 return false }, - /* 44 Nil <- <(('n' 'i' 'l') / '~')> */ + /* 45 Nil <- <(('n' 'i' 'l') / '~')> */ func() bool { - position176, tokenIndex176, depth176 := position, tokenIndex, depth + position180, tokenIndex180, depth180 := position, tokenIndex, depth { - position177 := position + position181 := position depth++ { - position178, tokenIndex178, depth178 := position, tokenIndex, depth + position182, tokenIndex182, depth182 := position, tokenIndex, depth if buffer[position] != rune('n') { - goto l179 + goto l183 } position++ if buffer[position] != rune('i') { - goto l179 + goto l183 } position++ if buffer[position] != rune('l') { - goto l179 + goto l183 } position++ - goto l178 - l179: - position, tokenIndex, depth = position178, tokenIndex178, depth178 + goto l182 + l183: + position, tokenIndex, depth = position182, tokenIndex182, depth182 if buffer[position] != rune('~') { - goto l176 + goto l180 } position++ } - l178: + l182: depth-- - add(ruleNil, position177) + add(ruleNil, position181) } return true - l176: - position, tokenIndex, depth = position176, tokenIndex176, depth176 + l180: + position, tokenIndex, depth = position180, tokenIndex180, depth180 return false }, - /* 45 Undefined <- <('~' '~')> */ + /* 46 Undefined <- <('~' '~')> */ func() bool { - position180, tokenIndex180, depth180 := position, tokenIndex, depth + position184, tokenIndex184, depth184 := position, tokenIndex, depth { - position181 := position + position185 := position depth++ if buffer[position] != rune('~') { - goto l180 + goto l184 } position++ if buffer[position] != rune('~') { - goto l180 + 
goto l184 } position++ depth-- - add(ruleUndefined, position181) + add(ruleUndefined, position185) } return true - l180: - position, tokenIndex, depth = position180, tokenIndex180, depth180 + l184: + position, tokenIndex, depth = position184, tokenIndex184, depth184 return false }, - /* 46 List <- <('[' Contents? ']')> */ + /* 47 List <- <(StartList ExpressionList? ']')> */ func() bool { - position182, tokenIndex182, depth182 := position, tokenIndex, depth + position186, tokenIndex186, depth186 := position, tokenIndex, depth { - position183 := position + position187 := position depth++ - if buffer[position] != rune('[') { - goto l182 + if !_rules[ruleStartList]() { + goto l186 } - position++ { - position184, tokenIndex184, depth184 := position, tokenIndex, depth - if !_rules[ruleContents]() { - goto l184 + position188, tokenIndex188, depth188 := position, tokenIndex, depth + if !_rules[ruleExpressionList]() { + goto l188 } - goto l185 - l184: - position, tokenIndex, depth = position184, tokenIndex184, depth184 + goto l189 + l188: + position, tokenIndex, depth = position188, tokenIndex188, depth188 } - l185: + l189: if buffer[position] != rune(']') { - goto l182 + goto l186 } position++ depth-- - add(ruleList, position183) + add(ruleList, position187) } return true - l182: - position, tokenIndex, depth = position182, tokenIndex182, depth182 + l186: + position, tokenIndex, depth = position186, tokenIndex186, depth186 return false }, - /* 47 Contents <- <(Expression NextExpression*)> */ + /* 48 StartList <- <'['> */ func() bool { - position186, tokenIndex186, depth186 := position, tokenIndex, depth + position190, tokenIndex190, depth190 := position, tokenIndex, depth { - position187 := position + position191 := position depth++ - if !_rules[ruleExpression]() { - goto l186 - } - l188: - { - position189, tokenIndex189, depth189 := position, tokenIndex, depth - if !_rules[ruleNextExpression]() { - goto l189 - } - goto l188 - l189: - position, tokenIndex, depth = 
position189, tokenIndex189, depth189 + if buffer[position] != rune('[') { + goto l190 } + position++ depth-- - add(ruleContents, position187) + add(ruleStartList, position191) } return true - l186: - position, tokenIndex, depth = position186, tokenIndex186, depth186 + l190: + position, tokenIndex, depth = position190, tokenIndex190, depth190 return false }, - /* 48 Map <- <(CreateMap ws Assignments? '}')> */ + /* 49 Map <- <(CreateMap ws Assignments? '}')> */ func() bool { - position190, tokenIndex190, depth190 := position, tokenIndex, depth + position192, tokenIndex192, depth192 := position, tokenIndex, depth { - position191 := position + position193 := position depth++ if !_rules[ruleCreateMap]() { - goto l190 + goto l192 } if !_rules[rulews]() { - goto l190 + goto l192 } { - position192, tokenIndex192, depth192 := position, tokenIndex, depth + position194, tokenIndex194, depth194 := position, tokenIndex, depth if !_rules[ruleAssignments]() { - goto l192 + goto l194 } - goto l193 - l192: - position, tokenIndex, depth = position192, tokenIndex192, depth192 + goto l195 + l194: + position, tokenIndex, depth = position194, tokenIndex194, depth194 } - l193: + l195: if buffer[position] != rune('}') { - goto l190 + goto l192 } position++ depth-- - add(ruleMap, position191) + add(ruleMap, position193) } return true - l190: - position, tokenIndex, depth = position190, tokenIndex190, depth190 + l192: + position, tokenIndex, depth = position192, tokenIndex192, depth192 return false }, - /* 49 CreateMap <- <'{'> */ + /* 50 CreateMap <- <'{'> */ func() bool { - position194, tokenIndex194, depth194 := position, tokenIndex, depth + position196, tokenIndex196, depth196 := position, tokenIndex, depth { - position195 := position + position197 := position depth++ if buffer[position] != rune('{') { - goto l194 + goto l196 } position++ depth-- - add(ruleCreateMap, position195) + add(ruleCreateMap, position197) } return true - l194: - position, tokenIndex, depth = position194, 
tokenIndex194, depth194 + l196: + position, tokenIndex, depth = position196, tokenIndex196, depth196 return false }, - /* 50 Assignments <- <(Assignment (',' Assignment)*)> */ + /* 51 Assignments <- <(Assignment (',' Assignment)*)> */ func() bool { - position196, tokenIndex196, depth196 := position, tokenIndex, depth + position198, tokenIndex198, depth198 := position, tokenIndex, depth { - position197 := position + position199 := position depth++ if !_rules[ruleAssignment]() { - goto l196 + goto l198 } - l198: + l200: { - position199, tokenIndex199, depth199 := position, tokenIndex, depth + position201, tokenIndex201, depth201 := position, tokenIndex, depth if buffer[position] != rune(',') { - goto l199 + goto l201 } position++ if !_rules[ruleAssignment]() { - goto l199 + goto l201 } - goto l198 - l199: - position, tokenIndex, depth = position199, tokenIndex199, depth199 + goto l200 + l201: + position, tokenIndex, depth = position201, tokenIndex201, depth201 } depth-- - add(ruleAssignments, position197) + add(ruleAssignments, position199) } return true - l196: - position, tokenIndex, depth = position196, tokenIndex196, depth196 + l198: + position, tokenIndex, depth = position198, tokenIndex198, depth198 return false }, - /* 51 Assignment <- <(Expression '=' Expression)> */ + /* 52 Assignment <- <(Expression '=' Expression)> */ func() bool { - position200, tokenIndex200, depth200 := position, tokenIndex, depth + position202, tokenIndex202, depth202 := position, tokenIndex, depth { - position201 := position + position203 := position depth++ if !_rules[ruleExpression]() { - goto l200 + goto l202 } if buffer[position] != rune('=') { - goto l200 + goto l202 } position++ if !_rules[ruleExpression]() { - goto l200 + goto l202 } depth-- - add(ruleAssignment, position201) + add(ruleAssignment, position203) } return true - l200: - position, tokenIndex, depth = position200, tokenIndex200, depth200 + l202: + position, tokenIndex, depth = position202, tokenIndex202, depth202 
return false }, - /* 52 Merge <- <(RefMerge / SimpleMerge)> */ + /* 53 Merge <- <(RefMerge / SimpleMerge)> */ func() bool { - position202, tokenIndex202, depth202 := position, tokenIndex, depth + position204, tokenIndex204, depth204 := position, tokenIndex, depth { - position203 := position + position205 := position depth++ { - position204, tokenIndex204, depth204 := position, tokenIndex, depth + position206, tokenIndex206, depth206 := position, tokenIndex, depth if !_rules[ruleRefMerge]() { - goto l205 + goto l207 } - goto l204 - l205: - position, tokenIndex, depth = position204, tokenIndex204, depth204 + goto l206 + l207: + position, tokenIndex, depth = position206, tokenIndex206, depth206 if !_rules[ruleSimpleMerge]() { - goto l202 + goto l204 } } - l204: + l206: depth-- - add(ruleMerge, position203) + add(ruleMerge, position205) } return true - l202: - position, tokenIndex, depth = position202, tokenIndex202, depth202 + l204: + position, tokenIndex, depth = position204, tokenIndex204, depth204 return false }, - /* 53 RefMerge <- <('m' 'e' 'r' 'g' 'e' !(req_ws Required) (req_ws (Replace / On))? req_ws Reference)> */ + /* 54 RefMerge <- <('m' 'e' 'r' 'g' 'e' !(req_ws Required) (req_ws (Replace / On))? 
req_ws Reference)> */ func() bool { - position206, tokenIndex206, depth206 := position, tokenIndex, depth + position208, tokenIndex208, depth208 := position, tokenIndex, depth { - position207 := position + position209 := position depth++ if buffer[position] != rune('m') { - goto l206 + goto l208 } position++ if buffer[position] != rune('e') { - goto l206 + goto l208 } position++ if buffer[position] != rune('r') { - goto l206 + goto l208 } position++ if buffer[position] != rune('g') { - goto l206 + goto l208 } position++ if buffer[position] != rune('e') { - goto l206 + goto l208 } position++ { - position208, tokenIndex208, depth208 := position, tokenIndex, depth + position210, tokenIndex210, depth210 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l208 + goto l210 } if !_rules[ruleRequired]() { - goto l208 + goto l210 } - goto l206 - l208: - position, tokenIndex, depth = position208, tokenIndex208, depth208 + goto l208 + l210: + position, tokenIndex, depth = position210, tokenIndex210, depth210 } { - position209, tokenIndex209, depth209 := position, tokenIndex, depth + position211, tokenIndex211, depth211 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l209 + goto l211 } { - position211, tokenIndex211, depth211 := position, tokenIndex, depth + position213, tokenIndex213, depth213 := position, tokenIndex, depth if !_rules[ruleReplace]() { - goto l212 + goto l214 } - goto l211 - l212: - position, tokenIndex, depth = position211, tokenIndex211, depth211 + goto l213 + l214: + position, tokenIndex, depth = position213, tokenIndex213, depth213 if !_rules[ruleOn]() { - goto l209 + goto l211 } } + l213: + goto l212 l211: - goto l210 - l209: - position, tokenIndex, depth = position209, tokenIndex209, depth209 + position, tokenIndex, depth = position211, tokenIndex211, depth211 } - l210: + l212: if !_rules[rulereq_ws]() { - goto l206 + goto l208 } if !_rules[ruleReference]() { - goto l206 + goto l208 } depth-- - add(ruleRefMerge, position207) 
+ add(ruleRefMerge, position209) } return true - l206: - position, tokenIndex, depth = position206, tokenIndex206, depth206 + l208: + position, tokenIndex, depth = position208, tokenIndex208, depth208 return false }, - /* 54 SimpleMerge <- <('m' 'e' 'r' 'g' 'e' !'(' (req_ws (Replace / Required / On))?)> */ + /* 55 SimpleMerge <- <('m' 'e' 'r' 'g' 'e' !'(' (req_ws (Replace / Required / On))?)> */ func() bool { - position213, tokenIndex213, depth213 := position, tokenIndex, depth + position215, tokenIndex215, depth215 := position, tokenIndex, depth { - position214 := position + position216 := position depth++ if buffer[position] != rune('m') { - goto l213 + goto l215 } position++ if buffer[position] != rune('e') { - goto l213 + goto l215 } position++ if buffer[position] != rune('r') { - goto l213 + goto l215 } position++ if buffer[position] != rune('g') { - goto l213 + goto l215 } position++ if buffer[position] != rune('e') { - goto l213 + goto l215 } position++ { - position215, tokenIndex215, depth215 := position, tokenIndex, depth + position217, tokenIndex217, depth217 := position, tokenIndex, depth if buffer[position] != rune('(') { - goto l215 + goto l217 } position++ - goto l213 - l215: - position, tokenIndex, depth = position215, tokenIndex215, depth215 + goto l215 + l217: + position, tokenIndex, depth = position217, tokenIndex217, depth217 } { - position216, tokenIndex216, depth216 := position, tokenIndex, depth + position218, tokenIndex218, depth218 := position, tokenIndex, depth if !_rules[rulereq_ws]() { - goto l216 + goto l218 } { - position218, tokenIndex218, depth218 := position, tokenIndex, depth + position220, tokenIndex220, depth220 := position, tokenIndex, depth if !_rules[ruleReplace]() { - goto l219 + goto l221 } - goto l218 - l219: - position, tokenIndex, depth = position218, tokenIndex218, depth218 + goto l220 + l221: + position, tokenIndex, depth = position220, tokenIndex220, depth220 if !_rules[ruleRequired]() { - goto l220 + goto l222 } - goto 
l218 - l220: - position, tokenIndex, depth = position218, tokenIndex218, depth218 + goto l220 + l222: + position, tokenIndex, depth = position220, tokenIndex220, depth220 if !_rules[ruleOn]() { - goto l216 + goto l218 } } + l220: + goto l219 l218: - goto l217 - l216: - position, tokenIndex, depth = position216, tokenIndex216, depth216 + position, tokenIndex, depth = position218, tokenIndex218, depth218 } - l217: + l219: depth-- - add(ruleSimpleMerge, position214) + add(ruleSimpleMerge, position216) } return true - l213: - position, tokenIndex, depth = position213, tokenIndex213, depth213 + l215: + position, tokenIndex, depth = position215, tokenIndex215, depth215 return false }, - /* 55 Replace <- <('r' 'e' 'p' 'l' 'a' 'c' 'e')> */ + /* 56 Replace <- <('r' 'e' 'p' 'l' 'a' 'c' 'e')> */ func() bool { - position221, tokenIndex221, depth221 := position, tokenIndex, depth + position223, tokenIndex223, depth223 := position, tokenIndex, depth { - position222 := position + position224 := position depth++ if buffer[position] != rune('r') { - goto l221 + goto l223 } position++ if buffer[position] != rune('e') { - goto l221 + goto l223 } position++ if buffer[position] != rune('p') { - goto l221 + goto l223 } position++ if buffer[position] != rune('l') { - goto l221 + goto l223 } position++ if buffer[position] != rune('a') { - goto l221 + goto l223 } position++ if buffer[position] != rune('c') { - goto l221 + goto l223 } position++ if buffer[position] != rune('e') { - goto l221 + goto l223 } position++ depth-- - add(ruleReplace, position222) + add(ruleReplace, position224) } return true - l221: - position, tokenIndex, depth = position221, tokenIndex221, depth221 + l223: + position, tokenIndex, depth = position223, tokenIndex223, depth223 return false }, - /* 56 Required <- <('r' 'e' 'q' 'u' 'i' 'r' 'e' 'd')> */ + /* 57 Required <- <('r' 'e' 'q' 'u' 'i' 'r' 'e' 'd')> */ func() bool { - position223, tokenIndex223, depth223 := position, tokenIndex, depth + position225, 
tokenIndex225, depth225 := position, tokenIndex, depth { - position224 := position + position226 := position depth++ if buffer[position] != rune('r') { - goto l223 + goto l225 } position++ if buffer[position] != rune('e') { - goto l223 + goto l225 } position++ if buffer[position] != rune('q') { - goto l223 + goto l225 } position++ if buffer[position] != rune('u') { - goto l223 + goto l225 } position++ if buffer[position] != rune('i') { - goto l223 + goto l225 } position++ if buffer[position] != rune('r') { - goto l223 + goto l225 } position++ if buffer[position] != rune('e') { - goto l223 + goto l225 } position++ if buffer[position] != rune('d') { - goto l223 + goto l225 } position++ depth-- - add(ruleRequired, position224) + add(ruleRequired, position226) } return true - l223: - position, tokenIndex, depth = position223, tokenIndex223, depth223 - return false + l225: + position, tokenIndex, depth = position225, tokenIndex225, depth225 + return false }, - /* 57 On <- <('o' 'n' req_ws Name)> */ + /* 58 On <- <('o' 'n' req_ws Name)> */ func() bool { - position225, tokenIndex225, depth225 := position, tokenIndex, depth + position227, tokenIndex227, depth227 := position, tokenIndex, depth { - position226 := position + position228 := position depth++ if buffer[position] != rune('o') { - goto l225 + goto l227 } position++ if buffer[position] != rune('n') { - goto l225 + goto l227 } position++ if !_rules[rulereq_ws]() { - goto l225 + goto l227 } if !_rules[ruleName]() { - goto l225 + goto l227 } depth-- - add(ruleOn, position226) + add(ruleOn, position228) } return true - l225: - position, tokenIndex, depth = position225, tokenIndex225, depth225 + l227: + position, tokenIndex, depth = position227, tokenIndex227, depth227 return false }, - /* 58 Auto <- <('a' 'u' 't' 'o')> */ + /* 59 Auto <- <('a' 'u' 't' 'o')> */ func() bool { - position227, tokenIndex227, depth227 := position, tokenIndex, depth + position229, tokenIndex229, depth229 := position, tokenIndex, depth { - 
position228 := position + position230 := position depth++ if buffer[position] != rune('a') { - goto l227 + goto l229 } position++ if buffer[position] != rune('u') { - goto l227 + goto l229 } position++ if buffer[position] != rune('t') { - goto l227 + goto l229 } position++ if buffer[position] != rune('o') { - goto l227 + goto l229 } position++ depth-- - add(ruleAuto, position228) + add(ruleAuto, position230) } return true - l227: - position, tokenIndex, depth = position227, tokenIndex227, depth227 + l229: + position, tokenIndex, depth = position229, tokenIndex229, depth229 return false }, - /* 59 Mapping <- <('m' 'a' 'p' '[' Level7 (LambdaExpr / ('|' Expression)) ']')> */ + /* 60 Mapping <- <('m' 'a' 'p' '[' Level7 (LambdaExpr / ('|' Expression)) ']')> */ func() bool { - position229, tokenIndex229, depth229 := position, tokenIndex, depth + position231, tokenIndex231, depth231 := position, tokenIndex, depth { - position230 := position + position232 := position depth++ if buffer[position] != rune('m') { - goto l229 + goto l231 } position++ if buffer[position] != rune('a') { - goto l229 + goto l231 } position++ if buffer[position] != rune('p') { - goto l229 + goto l231 } position++ if buffer[position] != rune('[') { - goto l229 + goto l231 } position++ if !_rules[ruleLevel7]() { - goto l229 + goto l231 } { - position231, tokenIndex231, depth231 := position, tokenIndex, depth + position233, tokenIndex233, depth233 := position, tokenIndex, depth if !_rules[ruleLambdaExpr]() { - goto l232 + goto l234 } - goto l231 - l232: - position, tokenIndex, depth = position231, tokenIndex231, depth231 + goto l233 + l234: + position, tokenIndex, depth = position233, tokenIndex233, depth233 if buffer[position] != rune('|') { - goto l229 + goto l231 } position++ if !_rules[ruleExpression]() { - goto l229 + goto l231 } } - l231: + l233: if buffer[position] != rune(']') { - goto l229 + goto l231 } position++ depth-- - add(ruleMapping, position230) + add(ruleMapping, position232) } return 
true - l229: - position, tokenIndex, depth = position229, tokenIndex229, depth229 + l231: + position, tokenIndex, depth = position231, tokenIndex231, depth231 return false }, - /* 60 Sum <- <('s' 'u' 'm' '[' Level7 '|' Level7 (LambdaExpr / ('|' Expression)) ']')> */ + /* 61 Sum <- <('s' 'u' 'm' '[' Level7 '|' Level7 (LambdaExpr / ('|' Expression)) ']')> */ func() bool { - position233, tokenIndex233, depth233 := position, tokenIndex, depth + position235, tokenIndex235, depth235 := position, tokenIndex, depth { - position234 := position + position236 := position depth++ if buffer[position] != rune('s') { - goto l233 + goto l235 } position++ if buffer[position] != rune('u') { - goto l233 + goto l235 } position++ if buffer[position] != rune('m') { - goto l233 + goto l235 } position++ if buffer[position] != rune('[') { - goto l233 + goto l235 } position++ if !_rules[ruleLevel7]() { - goto l233 + goto l235 } if buffer[position] != rune('|') { - goto l233 + goto l235 } position++ if !_rules[ruleLevel7]() { - goto l233 + goto l235 } { - position235, tokenIndex235, depth235 := position, tokenIndex, depth + position237, tokenIndex237, depth237 := position, tokenIndex, depth if !_rules[ruleLambdaExpr]() { - goto l236 + goto l238 } - goto l235 - l236: - position, tokenIndex, depth = position235, tokenIndex235, depth235 + goto l237 + l238: + position, tokenIndex, depth = position237, tokenIndex237, depth237 if buffer[position] != rune('|') { - goto l233 + goto l235 } position++ if !_rules[ruleExpression]() { - goto l233 + goto l235 } } - l235: + l237: if buffer[position] != rune(']') { - goto l233 + goto l235 } position++ depth-- - add(ruleSum, position234) + add(ruleSum, position236) } return true - l233: - position, tokenIndex, depth = position233, tokenIndex233, depth233 + l235: + position, tokenIndex, depth = position235, tokenIndex235, depth235 return false }, - /* 61 Lambda <- <('l' 'a' 'm' 'b' 'd' 'a' (LambdaRef / LambdaExpr))> */ + /* 62 Lambda <- <('l' 'a' 'm' 'b' 'd' 
'a' (LambdaRef / LambdaExpr))> */ func() bool { - position237, tokenIndex237, depth237 := position, tokenIndex, depth + position239, tokenIndex239, depth239 := position, tokenIndex, depth { - position238 := position + position240 := position depth++ if buffer[position] != rune('l') { - goto l237 + goto l239 } position++ if buffer[position] != rune('a') { - goto l237 + goto l239 } position++ if buffer[position] != rune('m') { - goto l237 + goto l239 } position++ if buffer[position] != rune('b') { - goto l237 + goto l239 } position++ if buffer[position] != rune('d') { - goto l237 + goto l239 } position++ if buffer[position] != rune('a') { - goto l237 + goto l239 } position++ { - position239, tokenIndex239, depth239 := position, tokenIndex, depth + position241, tokenIndex241, depth241 := position, tokenIndex, depth if !_rules[ruleLambdaRef]() { - goto l240 + goto l242 } - goto l239 - l240: - position, tokenIndex, depth = position239, tokenIndex239, depth239 + goto l241 + l242: + position, tokenIndex, depth = position241, tokenIndex241, depth241 if !_rules[ruleLambdaExpr]() { - goto l237 + goto l239 } } - l239: + l241: depth-- - add(ruleLambda, position238) + add(ruleLambda, position240) } return true - l237: - position, tokenIndex, depth = position237, tokenIndex237, depth237 + l239: + position, tokenIndex, depth = position239, tokenIndex239, depth239 return false }, - /* 62 LambdaRef <- <(req_ws Expression)> */ + /* 63 LambdaRef <- <(req_ws Expression)> */ func() bool { - position241, tokenIndex241, depth241 := position, tokenIndex, depth + position243, tokenIndex243, depth243 := position, tokenIndex, depth { - position242 := position + position244 := position depth++ if !_rules[rulereq_ws]() { - goto l241 + goto l243 } if !_rules[ruleExpression]() { - goto l241 + goto l243 } depth-- - add(ruleLambdaRef, position242) + add(ruleLambdaRef, position244) } return true - l241: - position, tokenIndex, depth = position241, tokenIndex241, depth241 + l243: + position, 
tokenIndex, depth = position243, tokenIndex243, depth243 return false }, - /* 63 LambdaExpr <- <(ws '|' ws Name NextName* ws '|' ws ('-' '>') Expression)> */ + /* 64 LambdaExpr <- <(ws '|' ws Name NextName* ws '|' ws ('-' '>') Expression)> */ func() bool { - position243, tokenIndex243, depth243 := position, tokenIndex, depth + position245, tokenIndex245, depth245 := position, tokenIndex, depth { - position244 := position + position246 := position depth++ if !_rules[rulews]() { - goto l243 + goto l245 } if buffer[position] != rune('|') { - goto l243 + goto l245 } position++ if !_rules[rulews]() { - goto l243 + goto l245 } if !_rules[ruleName]() { - goto l243 + goto l245 } - l245: + l247: { - position246, tokenIndex246, depth246 := position, tokenIndex, depth + position248, tokenIndex248, depth248 := position, tokenIndex, depth if !_rules[ruleNextName]() { - goto l246 + goto l248 } - goto l245 - l246: - position, tokenIndex, depth = position246, tokenIndex246, depth246 + goto l247 + l248: + position, tokenIndex, depth = position248, tokenIndex248, depth248 } if !_rules[rulews]() { - goto l243 + goto l245 } if buffer[position] != rune('|') { - goto l243 + goto l245 } position++ if !_rules[rulews]() { - goto l243 + goto l245 } if buffer[position] != rune('-') { - goto l243 + goto l245 } position++ if buffer[position] != rune('>') { - goto l243 + goto l245 } position++ if !_rules[ruleExpression]() { - goto l243 + goto l245 } depth-- - add(ruleLambdaExpr, position244) + add(ruleLambdaExpr, position246) } return true - l243: - position, tokenIndex, depth = position243, tokenIndex243, depth243 + l245: + position, tokenIndex, depth = position245, tokenIndex245, depth245 return false }, - /* 64 NextName <- <(ws ',' ws Name)> */ + /* 65 NextName <- <(ws ',' ws Name)> */ func() bool { - position247, tokenIndex247, depth247 := position, tokenIndex, depth + position249, tokenIndex249, depth249 := position, tokenIndex, depth { - position248 := position + position250 := position 
depth++ if !_rules[rulews]() { - goto l247 + goto l249 } if buffer[position] != rune(',') { - goto l247 + goto l249 } position++ if !_rules[rulews]() { - goto l247 + goto l249 } if !_rules[ruleName]() { - goto l247 + goto l249 } depth-- - add(ruleNextName, position248) + add(ruleNextName, position250) } return true - l247: - position, tokenIndex, depth = position247, tokenIndex247, depth247 + l249: + position, tokenIndex, depth = position249, tokenIndex249, depth249 return false }, - /* 65 Name <- <([a-z] / [A-Z] / [0-9] / '_')+> */ + /* 66 Name <- <([a-z] / [A-Z] / [0-9] / '_')+> */ func() bool { - position249, tokenIndex249, depth249 := position, tokenIndex, depth + position251, tokenIndex251, depth251 := position, tokenIndex, depth { - position250 := position + position252 := position depth++ { - position253, tokenIndex253, depth253 := position, tokenIndex, depth + position255, tokenIndex255, depth255 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l254 + goto l256 } position++ - goto l253 - l254: - position, tokenIndex, depth = position253, tokenIndex253, depth253 + goto l255 + l256: + position, tokenIndex, depth = position255, tokenIndex255, depth255 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l255 + goto l257 } position++ - goto l253 - l255: - position, tokenIndex, depth = position253, tokenIndex253, depth253 + goto l255 + l257: + position, tokenIndex, depth = position255, tokenIndex255, depth255 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l256 + goto l258 } position++ - goto l253 - l256: - position, tokenIndex, depth = position253, tokenIndex253, depth253 + goto l255 + l258: + position, tokenIndex, depth = position255, tokenIndex255, depth255 if buffer[position] != rune('_') { - goto l249 + goto l251 } position++ } + l255: l253: - l251: { - position252, tokenIndex252, depth252 := position, tokenIndex, depth + position254, tokenIndex254, depth254 := position, 
tokenIndex, depth { - position257, tokenIndex257, depth257 := position, tokenIndex, depth + position259, tokenIndex259, depth259 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l258 + goto l260 } position++ - goto l257 - l258: - position, tokenIndex, depth = position257, tokenIndex257, depth257 + goto l259 + l260: + position, tokenIndex, depth = position259, tokenIndex259, depth259 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l259 + goto l261 } position++ - goto l257 - l259: - position, tokenIndex, depth = position257, tokenIndex257, depth257 + goto l259 + l261: + position, tokenIndex, depth = position259, tokenIndex259, depth259 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l260 + goto l262 } position++ - goto l257 - l260: - position, tokenIndex, depth = position257, tokenIndex257, depth257 + goto l259 + l262: + position, tokenIndex, depth = position259, tokenIndex259, depth259 if buffer[position] != rune('_') { - goto l252 + goto l254 } position++ } - l257: - goto l251 - l252: - position, tokenIndex, depth = position252, tokenIndex252, depth252 + l259: + goto l253 + l254: + position, tokenIndex, depth = position254, tokenIndex254, depth254 } depth-- - add(ruleName, position250) + add(ruleName, position252) } return true - l249: - position, tokenIndex, depth = position249, tokenIndex249, depth249 + l251: + position, tokenIndex, depth = position251, tokenIndex251, depth251 return false }, - /* 66 Reference <- <('.'? Key FollowUpRef)> */ + /* 67 Reference <- <('.'? 
Key FollowUpRef)> */ func() bool { - position261, tokenIndex261, depth261 := position, tokenIndex, depth + position263, tokenIndex263, depth263 := position, tokenIndex, depth { - position262 := position + position264 := position depth++ { - position263, tokenIndex263, depth263 := position, tokenIndex, depth + position265, tokenIndex265, depth265 := position, tokenIndex, depth if buffer[position] != rune('.') { - goto l263 + goto l265 } position++ - goto l264 - l263: - position, tokenIndex, depth = position263, tokenIndex263, depth263 + goto l266 + l265: + position, tokenIndex, depth = position265, tokenIndex265, depth265 } - l264: + l266: if !_rules[ruleKey]() { - goto l261 + goto l263 } if !_rules[ruleFollowUpRef]() { - goto l261 + goto l263 } depth-- - add(ruleReference, position262) + add(ruleReference, position264) } return true - l261: - position, tokenIndex, depth = position261, tokenIndex261, depth261 + l263: + position, tokenIndex, depth = position263, tokenIndex263, depth263 return false }, - /* 67 FollowUpRef <- <('.' (Key / Index))*> */ + /* 68 FollowUpRef <- <('.' 
(Key / Index))*> */ func() bool { { - position266 := position + position268 := position depth++ - l267: + l269: { - position268, tokenIndex268, depth268 := position, tokenIndex, depth + position270, tokenIndex270, depth270 := position, tokenIndex, depth if buffer[position] != rune('.') { - goto l268 + goto l270 } position++ { - position269, tokenIndex269, depth269 := position, tokenIndex, depth + position271, tokenIndex271, depth271 := position, tokenIndex, depth if !_rules[ruleKey]() { - goto l270 + goto l272 } - goto l269 - l270: - position, tokenIndex, depth = position269, tokenIndex269, depth269 + goto l271 + l272: + position, tokenIndex, depth = position271, tokenIndex271, depth271 if !_rules[ruleIndex]() { - goto l268 + goto l270 } } - l269: - goto l267 - l268: - position, tokenIndex, depth = position268, tokenIndex268, depth268 + l271: + goto l269 + l270: + position, tokenIndex, depth = position270, tokenIndex270, depth270 } depth-- - add(ruleFollowUpRef, position266) + add(ruleFollowUpRef, position268) } return true }, - /* 68 Key <- <(([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')* (':' ([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')*)?)> */ + /* 69 Key <- <(([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')* (':' ([a-z] / [A-Z] / [0-9] / '_') ([a-z] / [A-Z] / [0-9] / '_' / '-')*)?)> */ func() bool { - position271, tokenIndex271, depth271 := position, tokenIndex, depth + position273, tokenIndex273, depth273 := position, tokenIndex, depth { - position272 := position + position274 := position depth++ { - position273, tokenIndex273, depth273 := position, tokenIndex, depth + position275, tokenIndex275, depth275 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l274 + goto l276 } position++ - goto l273 - l274: - position, tokenIndex, depth = position273, tokenIndex273, depth273 + goto l275 + l276: + position, tokenIndex, depth = position275, tokenIndex275, 
depth275 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l275 + goto l277 } position++ - goto l273 - l275: - position, tokenIndex, depth = position273, tokenIndex273, depth273 + goto l275 + l277: + position, tokenIndex, depth = position275, tokenIndex275, depth275 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l276 + goto l278 } position++ - goto l273 - l276: - position, tokenIndex, depth = position273, tokenIndex273, depth273 + goto l275 + l278: + position, tokenIndex, depth = position275, tokenIndex275, depth275 if buffer[position] != rune('_') { - goto l271 + goto l273 } position++ } - l273: - l277: + l275: + l279: { - position278, tokenIndex278, depth278 := position, tokenIndex, depth + position280, tokenIndex280, depth280 := position, tokenIndex, depth { - position279, tokenIndex279, depth279 := position, tokenIndex, depth + position281, tokenIndex281, depth281 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l280 + goto l282 } position++ - goto l279 - l280: - position, tokenIndex, depth = position279, tokenIndex279, depth279 + goto l281 + l282: + position, tokenIndex, depth = position281, tokenIndex281, depth281 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l281 + goto l283 } position++ - goto l279 - l281: - position, tokenIndex, depth = position279, tokenIndex279, depth279 + goto l281 + l283: + position, tokenIndex, depth = position281, tokenIndex281, depth281 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l282 + goto l284 } position++ - goto l279 - l282: - position, tokenIndex, depth = position279, tokenIndex279, depth279 + goto l281 + l284: + position, tokenIndex, depth = position281, tokenIndex281, depth281 if buffer[position] != rune('_') { - goto l283 + goto l285 } position++ - goto l279 - l283: - position, tokenIndex, depth = position279, tokenIndex279, depth279 + goto l281 + l285: + position, tokenIndex, depth = position281, 
tokenIndex281, depth281 if buffer[position] != rune('-') { - goto l278 + goto l280 } position++ } - l279: - goto l277 - l278: - position, tokenIndex, depth = position278, tokenIndex278, depth278 + l281: + goto l279 + l280: + position, tokenIndex, depth = position280, tokenIndex280, depth280 } { - position284, tokenIndex284, depth284 := position, tokenIndex, depth + position286, tokenIndex286, depth286 := position, tokenIndex, depth if buffer[position] != rune(':') { - goto l284 + goto l286 } position++ { - position286, tokenIndex286, depth286 := position, tokenIndex, depth + position288, tokenIndex288, depth288 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l287 + goto l289 } position++ - goto l286 - l287: - position, tokenIndex, depth = position286, tokenIndex286, depth286 + goto l288 + l289: + position, tokenIndex, depth = position288, tokenIndex288, depth288 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l288 + goto l290 } position++ - goto l286 - l288: - position, tokenIndex, depth = position286, tokenIndex286, depth286 + goto l288 + l290: + position, tokenIndex, depth = position288, tokenIndex288, depth288 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l289 + goto l291 } position++ - goto l286 - l289: - position, tokenIndex, depth = position286, tokenIndex286, depth286 + goto l288 + l291: + position, tokenIndex, depth = position288, tokenIndex288, depth288 if buffer[position] != rune('_') { - goto l284 + goto l286 } position++ } - l286: - l290: + l288: + l292: { - position291, tokenIndex291, depth291 := position, tokenIndex, depth + position293, tokenIndex293, depth293 := position, tokenIndex, depth { - position292, tokenIndex292, depth292 := position, tokenIndex, depth + position294, tokenIndex294, depth294 := position, tokenIndex, depth if c := buffer[position]; c < rune('a') || c > rune('z') { - goto l293 + goto l295 } position++ - goto l292 - l293: - position, 
tokenIndex, depth = position292, tokenIndex292, depth292 + goto l294 + l295: + position, tokenIndex, depth = position294, tokenIndex294, depth294 if c := buffer[position]; c < rune('A') || c > rune('Z') { - goto l294 + goto l296 } position++ - goto l292 - l294: - position, tokenIndex, depth = position292, tokenIndex292, depth292 + goto l294 + l296: + position, tokenIndex, depth = position294, tokenIndex294, depth294 if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l295 + goto l297 } position++ - goto l292 - l295: - position, tokenIndex, depth = position292, tokenIndex292, depth292 + goto l294 + l297: + position, tokenIndex, depth = position294, tokenIndex294, depth294 if buffer[position] != rune('_') { - goto l296 + goto l298 } position++ - goto l292 - l296: - position, tokenIndex, depth = position292, tokenIndex292, depth292 + goto l294 + l298: + position, tokenIndex, depth = position294, tokenIndex294, depth294 if buffer[position] != rune('-') { - goto l291 + goto l293 } position++ } - l292: - goto l290 - l291: - position, tokenIndex, depth = position291, tokenIndex291, depth291 + l294: + goto l292 + l293: + position, tokenIndex, depth = position293, tokenIndex293, depth293 } - goto l285 - l284: - position, tokenIndex, depth = position284, tokenIndex284, depth284 + goto l287 + l286: + position, tokenIndex, depth = position286, tokenIndex286, depth286 } - l285: + l287: depth-- - add(ruleKey, position272) + add(ruleKey, position274) } return true - l271: - position, tokenIndex, depth = position271, tokenIndex271, depth271 + l273: + position, tokenIndex, depth = position273, tokenIndex273, depth273 return false }, - /* 69 Index <- <('[' [0-9]+ ']')> */ + /* 70 Index <- <('[' [0-9]+ ']')> */ func() bool { - position297, tokenIndex297, depth297 := position, tokenIndex, depth + position299, tokenIndex299, depth299 := position, tokenIndex, depth { - position298 := position + position300 := position depth++ if buffer[position] != rune('[') { - goto l297 
+ goto l299 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l297 + goto l299 } position++ - l299: + l301: { - position300, tokenIndex300, depth300 := position, tokenIndex, depth + position302, tokenIndex302, depth302 := position, tokenIndex, depth if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l300 + goto l302 } position++ - goto l299 - l300: - position, tokenIndex, depth = position300, tokenIndex300, depth300 + goto l301 + l302: + position, tokenIndex, depth = position302, tokenIndex302, depth302 } if buffer[position] != rune(']') { - goto l297 + goto l299 } position++ depth-- - add(ruleIndex, position298) + add(ruleIndex, position300) } return true - l297: - position, tokenIndex, depth = position297, tokenIndex297, depth297 + l299: + position, tokenIndex, depth = position299, tokenIndex299, depth299 return false }, - /* 70 IP <- <([0-9]+ '.' [0-9]+ '.' [0-9]+ '.' [0-9]+)> */ + /* 71 IP <- <([0-9]+ '.' [0-9]+ '.' [0-9]+ '.' [0-9]+)> */ func() bool { - position301, tokenIndex301, depth301 := position, tokenIndex, depth + position303, tokenIndex303, depth303 := position, tokenIndex, depth { - position302 := position + position304 := position depth++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l301 - } - position++ - l303: - { - position304, tokenIndex304, depth304 := position, tokenIndex, depth - if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l304 - } - position++ goto l303 - l304: - position, tokenIndex, depth = position304, tokenIndex304, depth304 - } - if buffer[position] != rune('.') { - goto l301 - } - position++ - if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l301 } position++ l305: @@ -3465,11 +3465,11 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position306, tokenIndex306, depth306 } if buffer[position] != rune('.') { - goto l301 + goto l303 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l301 + 
goto l303 } position++ l307: @@ -3484,11 +3484,11 @@ func (p *DynamlGrammar) Init() { position, tokenIndex, depth = position308, tokenIndex308, depth308 } if buffer[position] != rune('.') { - goto l301 + goto l303 } position++ if c := buffer[position]; c < rune('0') || c > rune('9') { - goto l301 + goto l303 } position++ l309: @@ -3502,140 +3502,159 @@ func (p *DynamlGrammar) Init() { l310: position, tokenIndex, depth = position310, tokenIndex310, depth310 } + if buffer[position] != rune('.') { + goto l303 + } + position++ + if c := buffer[position]; c < rune('0') || c > rune('9') { + goto l303 + } + position++ + l311: + { + position312, tokenIndex312, depth312 := position, tokenIndex, depth + if c := buffer[position]; c < rune('0') || c > rune('9') { + goto l312 + } + position++ + goto l311 + l312: + position, tokenIndex, depth = position312, tokenIndex312, depth312 + } depth-- - add(ruleIP, position302) + add(ruleIP, position304) } return true - l301: - position, tokenIndex, depth = position301, tokenIndex301, depth301 + l303: + position, tokenIndex, depth = position303, tokenIndex303, depth303 return false }, - /* 71 ws <- <(' ' / '\t' / '\n' / '\r')*> */ + /* 72 ws <- <(' ' / '\t' / '\n' / '\r')*> */ func() bool { { - position312 := position + position314 := position depth++ - l313: + l315: { - position314, tokenIndex314, depth314 := position, tokenIndex, depth + position316, tokenIndex316, depth316 := position, tokenIndex, depth { - position315, tokenIndex315, depth315 := position, tokenIndex, depth + position317, tokenIndex317, depth317 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l316 + goto l318 } position++ - goto l315 - l316: - position, tokenIndex, depth = position315, tokenIndex315, depth315 + goto l317 + l318: + position, tokenIndex, depth = position317, tokenIndex317, depth317 if buffer[position] != rune('\t') { - goto l317 + goto l319 } position++ - goto l315 - l317: - position, tokenIndex, depth = position315, 
tokenIndex315, depth315 + goto l317 + l319: + position, tokenIndex, depth = position317, tokenIndex317, depth317 if buffer[position] != rune('\n') { - goto l318 + goto l320 } position++ - goto l315 - l318: - position, tokenIndex, depth = position315, tokenIndex315, depth315 + goto l317 + l320: + position, tokenIndex, depth = position317, tokenIndex317, depth317 if buffer[position] != rune('\r') { - goto l314 + goto l316 } position++ } - l315: - goto l313 - l314: - position, tokenIndex, depth = position314, tokenIndex314, depth314 + l317: + goto l315 + l316: + position, tokenIndex, depth = position316, tokenIndex316, depth316 } depth-- - add(rulews, position312) + add(rulews, position314) } return true }, - /* 72 req_ws <- <(' ' / '\t' / '\n' / '\r')+> */ + /* 73 req_ws <- <(' ' / '\t' / '\n' / '\r')+> */ func() bool { - position319, tokenIndex319, depth319 := position, tokenIndex, depth + position321, tokenIndex321, depth321 := position, tokenIndex, depth { - position320 := position + position322 := position depth++ { - position323, tokenIndex323, depth323 := position, tokenIndex, depth + position325, tokenIndex325, depth325 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l324 + goto l326 } position++ - goto l323 - l324: - position, tokenIndex, depth = position323, tokenIndex323, depth323 + goto l325 + l326: + position, tokenIndex, depth = position325, tokenIndex325, depth325 if buffer[position] != rune('\t') { - goto l325 + goto l327 } position++ - goto l323 - l325: - position, tokenIndex, depth = position323, tokenIndex323, depth323 + goto l325 + l327: + position, tokenIndex, depth = position325, tokenIndex325, depth325 if buffer[position] != rune('\n') { - goto l326 + goto l328 } position++ - goto l323 - l326: - position, tokenIndex, depth = position323, tokenIndex323, depth323 + goto l325 + l328: + position, tokenIndex, depth = position325, tokenIndex325, depth325 if buffer[position] != rune('\r') { - goto l319 + goto l321 } position++ 
} + l325: l323: - l321: { - position322, tokenIndex322, depth322 := position, tokenIndex, depth + position324, tokenIndex324, depth324 := position, tokenIndex, depth { - position327, tokenIndex327, depth327 := position, tokenIndex, depth + position329, tokenIndex329, depth329 := position, tokenIndex, depth if buffer[position] != rune(' ') { - goto l328 + goto l330 } position++ - goto l327 - l328: - position, tokenIndex, depth = position327, tokenIndex327, depth327 + goto l329 + l330: + position, tokenIndex, depth = position329, tokenIndex329, depth329 if buffer[position] != rune('\t') { - goto l329 + goto l331 } position++ - goto l327 - l329: - position, tokenIndex, depth = position327, tokenIndex327, depth327 + goto l329 + l331: + position, tokenIndex, depth = position329, tokenIndex329, depth329 if buffer[position] != rune('\n') { - goto l330 + goto l332 } position++ - goto l327 - l330: - position, tokenIndex, depth = position327, tokenIndex327, depth327 + goto l329 + l332: + position, tokenIndex, depth = position329, tokenIndex329, depth329 if buffer[position] != rune('\r') { - goto l322 + goto l324 } position++ } - l327: - goto l321 - l322: - position, tokenIndex, depth = position322, tokenIndex322, depth322 + l329: + goto l323 + l324: + position, tokenIndex, depth = position324, tokenIndex324, depth324 } depth-- - add(rulereq_ws, position320) + add(rulereq_ws, position322) } return true - l319: - position, tokenIndex, depth = position319, tokenIndex319, depth319 + l321: + position, tokenIndex, depth = position321, tokenIndex321, depth321 return false }, - /* 74 Action0 <- <{}> */ + /* 75 Action0 <- <{}> */ func() bool { { add(ruleAction0, position) diff --git a/dynaml/parser.go b/dynaml/parser.go index b50f845..fc1efe4 100644 --- a/dynaml/parser.go +++ b/dynaml/parser.go @@ -117,9 +117,11 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.Push(SliceExpr{expr, slice.(RangeExpr)}) case ruleChainedCall: + args := 
tokens.PopExpressionList() + f := tokens.Pop() tokens.Push(CallExpr{ - Function: tokens.Pop(), - Arguments: tokens.GetExpressionList(), + Function: f, + Arguments: args, }) case ruleAction0: @@ -274,23 +276,23 @@ func buildExpression(grammar *DynamlGrammar, path []string, stubPath []string) E tokens.Push(RangeExpr{lhs.(Expression), rhs.(Expression)}) case ruleList: - seq := tokens.GetExpressionList() + seq := tokens.PopExpressionList() tokens.Push(ListExpr{seq}) case ruleNextExpression: rhs := tokens.Pop() - - list := tokens.PopExpressionList() + list := tokens.Pop().(expressionListHelper) list.list = append(list.list, rhs) tokens.Push(list) - case ruleContents, ruleArguments: - tokens.SetExpressionList(tokens.PopExpressionList()) + case ruleStartList, ruleStartArguments: + tokens.Push(expressionListHelper{}) case ruleKey, ruleIndex: case ruleGrouped: case ruleLevel0, ruleLevel1, ruleLevel2, ruleLevel3, ruleLevel4, ruleLevel5, ruleLevel6, ruleLevel7: case ruleExpression: + case ruleExpressionList: case ruleMap: case ruleAssignments: case rulews: @@ -323,8 +325,6 @@ func equals(p1 []string, p2 []string) bool { type tokenStack struct { list.List - - expressionList *expressionListHelper } func (s *tokenStack) Pop() Expression { @@ -351,26 +351,8 @@ func (s *tokenStack) Push(expr Expression) { s.PushFront(expr) } -func (s *tokenStack) PopExpressionList() expressionListHelper { - lhs := s.Pop() - list, ok := lhs.(expressionListHelper) - if !ok { - list = expressionListHelper{list: []Expression{lhs}} - } - return list -} - -func (s *tokenStack) SetExpressionList(list expressionListHelper) { - s.expressionList = &list -} - -func (s *tokenStack) GetExpressionList() []Expression { - list := s.expressionList - s.expressionList = nil - if list == nil { - return []Expression(nil) - } - return list.list +func (s *tokenStack) PopExpressionList() []Expression { + return (s.Pop().(expressionListHelper)).list } func (s *tokenStack) PopNameList() nameListHelper { diff --git 
a/dynaml/parser_test.go b/dynaml/parser_test.go index 676f062..83c49e3 100644 --- a/dynaml/parser_test.go +++ b/dynaml/parser_test.go @@ -265,6 +265,16 @@ var _ = Describe("parsing", func() { }) Describe("calls", func() { + It("parses simple calls without arguments", func() { + parsesAs( + `foo()`, + CallExpr{ + ReferenceExpr{[]string{"foo"}}, + nil, + }, + ) + }) + It("parses simple calls for name", func() { parsesAs( `foo(1)`, From e061068433beb6e30b932cb8df892cc7e50ce948 Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Mon, 31 Dec 2018 21:56:56 +0100 Subject: [PATCH 12/13] support http and https for reading data --- dynaml/read.go | 25 ++++++++++++++++++++----- spiff++.go | 51 ++++++++++++++++++++++++++++++++++---------------- 2 files changed, 55 insertions(+), 21 deletions(-) diff --git a/dynaml/read.go b/dynaml/read.go index 8e21e35..7e52d99 100644 --- a/dynaml/read.go +++ b/dynaml/read.go @@ -2,6 +2,7 @@ package dynaml import ( "io/ioutil" + "net/http" "path" "strings" @@ -20,11 +21,11 @@ func func_read(arguments []interface{}, binding Binding) (interface{}, Evaluatio file, ok := arguments[0].(string) if !ok { - return info.Error("string value requiredfor file path") + return info.Error("string value required for file path") } t := "text" - if strings.HasSuffix(file, ".yml") { + if strings.HasSuffix(file, ".yml") || strings.HasSuffix(file, ".yaml") { t = "yaml" } if len(arguments) > 1 { @@ -40,9 +41,23 @@ func func_read(arguments []interface{}, binding Binding) (interface{}, Evaluatio data := fileCache[file] if data == nil { debug.Debug("reading %s file %s\n", t, file) - data, err = ioutil.ReadFile(file) - if err != nil { - return info.Error("error reading [%s]: %s", path.Clean(file), err) + if strings.HasPrefix(file, "http:") || strings.HasPrefix(file, "https:") { + response, err := http.Get(file) + if err != nil { + return info.Error("error getting [%s]: %s", file, err) + } else { + defer response.Body.Close() + contents, err := 
ioutil.ReadAll(response.Body) + if err != nil { + return info.Error("error getting body [%s]: %s", file, err) + } + data = contents + } + } else { + data, err = ioutil.ReadFile(file) + if err != nil { + return info.Error("error reading [%s]: %s", path.Clean(file), err) + } } fileCache[file] = data } diff --git a/spiff++.go b/spiff++.go index 44cd967..f13ecf8 100644 --- a/spiff++.go +++ b/spiff++.go @@ -4,6 +4,7 @@ import ( "fmt" "io/ioutil" "log" + "net/http" "os" "path" "strings" @@ -82,7 +83,7 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { templateFile, err = ioutil.ReadAll(os.Stdin) stdin = true } else { - templateFile, err = ioutil.ReadFile(templateFilePath) + templateFile, err = ReadFile(templateFilePath) } if err != nil { @@ -106,7 +107,7 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { stubFile, err = ioutil.ReadAll(os.Stdin) stdin = true } else { - stubFile, err = ioutil.ReadFile(stubFilePath) + stubFile, err = ReadFile(stubFilePath) } if err != nil { log.Fatalln(fmt.Sprintf("error reading stub [%s]:", path.Clean(stubFilePath)), err) @@ -135,24 +136,28 @@ func merge(templateFilePath string, partial bool, stubFilePaths []string) { if len(templateYAMLs) > 1 { doc = fmt.Sprintf(" (document %d)", no+1) } - flowed, err := flow.Apply(templateYAML, prepared) - if !partial && err != nil { - log.Fatalln(fmt.Sprintf("error generating manifest%s:", doc), err, legend) - } - if err != nil { - flowed = dynaml.ResetUnresolvedNodes(flowed) - } - yaml, err := candiedyaml.Marshal(flowed) - if err != nil { - log.Fatalln(fmt.Sprintf("error marshalling manifest%s:", doc), err) + if templateYAML.Value() != nil { + flowed, err := flow.Apply(templateYAML, prepared) + if !partial && err != nil { + log.Fatalln(fmt.Sprintf("error generating manifest%s:", doc), err, legend) + } + if err != nil { + flowed = dynaml.ResetUnresolvedNodes(flowed) + } + yaml, err := candiedyaml.Marshal(flowed) + if err != nil { + 
log.Fatalln(fmt.Sprintf("error marshalling manifest%s:", doc), err) + } + fmt.Println("---") + fmt.Println(string(yaml)) + } else { + fmt.Println("---") } - fmt.Println("---") - fmt.Println(string(yaml)) } } func diff(aFilePath, bFilePath string, separator string) { - aFile, err := ioutil.ReadFile(aFilePath) + aFile, err := ReadFile(aFilePath) if err != nil { log.Fatalln(fmt.Sprintf("error reading a [%s]:", path.Clean(aFilePath)), err) } @@ -162,7 +167,7 @@ func diff(aFilePath, bFilePath string, separator string) { log.Fatalln(fmt.Sprintf("error parsing a [%s]:", path.Clean(aFilePath)), err) } - bFile, err := ioutil.ReadFile(bFilePath) + bFile, err := ReadFile(bFilePath) if err != nil { log.Fatalln(fmt.Sprintf("error reading b [%s]:", path.Clean(bFilePath)), err) } @@ -227,3 +232,17 @@ func diff(aFilePath, bFilePath string, separator string) { } } } + +func ReadFile(file string) ([]byte, error) { + if strings.HasPrefix(file, "http:") || strings.HasPrefix(file, "https:") { + response, err := http.Get(file) + if err != nil { + return nil, fmt.Errorf("error getting [%s]: %s", file, err) + } else { + defer response.Body.Close() + return ioutil.ReadAll(response.Body) + } + } else { + return ioutil.ReadFile(file) + } +} From f61d995d56e2b07c20b27e0cf542c2a1919b6cad Mon Sep 17 00:00:00 2001 From: Uwe Krueger Date: Mon, 31 Dec 2018 22:08:37 +0100 Subject: [PATCH 13/13] fake godep --- spiff++.go | 2 +- travis.sh | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/spiff++.go b/spiff++.go index f13ecf8..bb808c6 100644 --- a/spiff++.go +++ b/spiff++.go @@ -198,7 +198,7 @@ func diff(aFilePath, bFilePath string, separator string) { for no := range aYAMLs { if len(ddiffs[no]) == 0 { if len(aYAMLs) > 1 { - fmt.Println("No difference in document %d", no+1) + fmt.Printf("No difference in document %d\n", no+1) } } else { diffs := ddiffs[no] diff --git a/travis.sh b/travis.sh index 3c43889..945e987 100755 --- a/travis.sh +++ b/travis.sh @@ -9,6 +9,8 @@ if [ ! 
-d "../../$O" ]; then cd "$O" echo "now in $(pwd)" fi +echo faking missing projects from vendor +cp -Rf vendor/* ../../.. echo getting dependencies godep get -v echo getting test dependencies